identifier
stringlengths
0
89
parameters
stringlengths
0
399
return_statement
stringlengths
0
982
docstring
stringlengths
10
3.04k
docstring_summary
stringlengths
0
3.04k
function
stringlengths
13
25.8k
function_tokens
sequence
start_point
sequence
end_point
sequence
argument_list
null
language
stringclasses
3 values
docstring_language
stringclasses
4 values
docstring_language_predictions
stringclasses
4 values
is_langid_reliable
stringclasses
2 values
is_langid_extra_reliable
bool
1 class
type
stringclasses
9 values
__full_text_cleaning
(text)
return text
Text wird als ein String betrachtet und gesäubert Das ist der erste Schritt. Args: text (str): Argument Text raw Returns: str: gesäuberten Text
Text wird als ein String betrachtet und gesäubert Das ist der erste Schritt.
def __full_text_cleaning(text):
    """Treat the text as one string and clean it up. This is the first step.

    Args:
        text (str): raw input text

    Returns:
        str: cleaned text
    """
    # Run every cleaning stage in order, feeding each result into the next.
    cleaning_stages = (
        __url_cleaning,
        __separate_commas,
        __special_char_cleaning,
        __delete_multi_letters,
        __remove_square_brackets,
        __parenthesis_cleaning,
        __clean_sub_points,
        __special_char_fixes,
        __tokenize_numbers,
    )
    for stage in cleaning_stages:
        text = stage(text)
    return text
[ "def", "__full_text_cleaning", "(", "text", ")", ":", "text", "=", "__url_cleaning", "(", "text", ")", "text", "=", "__separate_commas", "(", "text", ")", "text", "=", "__special_char_cleaning", "(", "text", ")", "text", "=", "__delete_multi_letters", "(", "text", ")", "text", "=", "__remove_square_brackets", "(", "text", ")", "text", "=", "__parenthesis_cleaning", "(", "text", ")", "text", "=", "__clean_sub_points", "(", "text", ")", "text", "=", "__special_char_fixes", "(", "text", ")", "text", "=", "__tokenize_numbers", "(", "text", ")", "return", "text" ]
[ 70, 0 ]
[ 91, 15 ]
null
python
de
['de', 'de', 'de']
True
true
null
MQTTclass.decodeMsgObj
(self, msgObj, basetopicReplace=True )
return result
Payload des msgObj wenn möglich nach string umwandeln. wenn json im string dann nach dict umwandeln Parameters ---------- msg: MQTTMessage umzuwandelndes message Object basetopicReplace: bool <basetopic> aus topic entfernen Returns ------- result: dict .. code:: { "topic" "payload" "_decodeMsgObj" # nur wenn Fehler aufgetaucht sind }
Payload des msgObj wenn möglich nach string umwandeln.
def decodeMsgObj(self, msgObj, basetopicReplace=True ):
    """Convert the payload of msgObj to a string if possible.

    If the string contains a JSON object it is converted to a dict.

    Parameters
    ----------
    msgObj: MQTTMessage or dict
        message object (or an already-decoded dict) to convert
    basetopicReplace: bool
        remove "<basetopic>/" from the topic

    Returns
    -------
    result: dict

    .. code::

        {
            "topic"
            "payload"
            "_decodeMsgObj"  # only present when an error occurred
        }
    """
    result = {"payload": ""}
    if isinstance(msgObj, dict):
        result["topic"] = msgObj["topic"]
        result["payload"] = msgObj["payload"]
    else:
        result = {"topic": msgObj.topic, "payload": msgObj.payload}

    if basetopicReplace:
        # strip the "<basetopic>/" prefix from the topic
        result["topic"] = result["topic"].replace("{}/".format(self.defaults["basetopic"]), "")

    # bytes payloads are decoded to str first
    if isinstance(result["payload"], bytes):
        try:
            result["payload"] = result["payload"].decode('utf-8')
        except Exception: # pragma: no cover
            result["_decodeMsgObj"] = "byte.decode error"

    # a payload that looks like a JSON object is parsed into a dict
    if isinstance(result["payload"], str) and result["payload"].startswith("{"):
        try:
            result["payload"] = json.loads(result["payload"])
        except ValueError: # pragma: no cover
            result["_decodeMsgObj"] = "json.loads error"

    return result
[ "def", "decodeMsgObj", "(", "self", ",", "msgObj", ",", "basetopicReplace", "=", "True", ")", ":", "result", "=", "{", "\"payload\"", ":", "\"\"", "}", "#print( \"# decodeMsgObj\", msgObj.topic, type( msgObj.payload ), msgObj.payload )", "if", "isinstance", "(", "msgObj", ",", "(", "dict", ")", ")", ":", "result", "[", "\"topic\"", "]", "=", "msgObj", "[", "\"topic\"", "]", "result", "[", "\"payload\"", "]", "=", "msgObj", "[", "\"payload\"", "]", "else", ":", "result", "=", "{", "\"topic\"", ":", "msgObj", ".", "topic", ",", "\"payload\"", ":", "msgObj", ".", "payload", "}", "if", "basetopicReplace", ":", "result", "[", "\"topic\"", "]", "=", "result", "[", "\"topic\"", "]", ".", "replace", "(", "\"{}/\"", ".", "format", "(", "self", ".", "defaults", "[", "\"basetopic\"", "]", ")", ",", "\"\"", ")", "if", "type", "(", "result", "[", "\"payload\"", "]", ")", "==", "bytes", ":", "try", ":", "result", "[", "\"payload\"", "]", "=", "result", "[", "\"payload\"", "]", ".", "decode", "(", "'utf-8'", ")", "except", "Exception", ":", "# pragma: no cover", "result", "[", "\"_decodeMsgObj\"", "]", "=", "\"byte.decode error\"", "if", "type", "(", "result", "[", "\"payload\"", "]", ")", "==", "str", "and", "len", "(", "result", "[", "\"payload\"", "]", ")", ">", "0", "and", "result", "[", "\"payload\"", "]", "[", "0", "]", "==", "\"{\"", ":", "try", ":", "result", "[", "\"payload\"", "]", "=", "json", ".", "loads", "(", "result", "[", "\"payload\"", "]", ")", "except", "ValueError", ":", "# pragma: no cover", "result", "[", "\"_decodeMsgObj\"", "]", "=", "\"json.loads error\"", "return", "result" ]
[ 373, 4 ]
[ 426, 21 ]
null
python
de
['de', 'de', 'de']
True
true
null
qa_mlc.FWHM_plot_error
(self, data, size:dict={}, plotTitle:str="", leaf_from:int=1 )
return self.getPlot()
Barchart mit allen Leafpaaren 0-60 anzeigen mit der Öffnungsbreite (fwxm) und der Verschiebung vom Zentrum (shift) Es werden zwei barPlots jeweils für x1 und x2 angelegt Attributes ---------- data: dict fwxm.data : array shift.data : array size: dict, optional Größe des Chart. The default is {}. plotTitle:str, optional Titel des Plots. The default is "". leaf_from : int, optional Nummer des ersten auzuwertenden leaf. The default is 1. Returns ------- Rückgabe von getPlot()
Barchart mit allen Leafpaaren 0-60 anzeigen mit der Öffnungsbreite (fwxm) und der Verschiebung vom Zentrum (shift)
def FWHM_plot_error(self, data, size: dict = None, plotTitle: str = "", leaf_from: int = 1):
    """Show a bar chart of all leaf pairs 0-60 with the opening width (fwxm)
    and the shift from the center (shift).

    Two bar plots are created, one each for x1 and x2.

    Attributes
    ----------
    data: dict
        fwxm.data : array
        shift.data : array
    size: dict, optional
        Size of the chart. The default is {}.
    plotTitle: str, optional
        Title of the plot. The default is "".
    leaf_from : int, optional
        Number of the first leaf to evaluate. The default is 1.

    Returns
    -------
    Return value of getPlot()
    """
    # Fix: mutable default argument ({}) replaced by a None sentinel;
    # callers that relied on the {} default see identical behavior.
    if size is None:
        size = {}

    virtLeafSize = 2.5
    error = 1.5
    limit = 2 * (error + virtLeafSize)

    # chart title if none was given
    if plotTitle == "":
        plotTitle = "lfd:{lfd:d} G:{gantry:01.1f} K:{collimator:01.1f}"

    fig, ax = self.initPlot(size, False, nrows=1, ncols=1)
    ax.set_title(plotTitle.format(**data, position=(0.5, 1.05)))

    plot = {"num": [], "x1": [], "x2": []}
    positions = []
    leaf = leaf_from
    for k, v in data['fwxm.data'].items():
        shift = data['shift.data'][k]
        plot["num"].append(leaf)
        # position and shift
        positions.append(k)
        v = ((v - 50) / 2) + virtLeafSize
        plot["x1"].append(v + shift)
        plot["x2"].append(-1 * v + shift)
        # next leaf
        leaf += 1

    # plot x1 and x2, both in blue
    ax.bar(plot["num"], plot["x1"], color="#0343dfCC", linewidth=1)
    ax.bar(plot["num"], plot["x2"], color="#0343dfCC", linewidth=1)

    ax.set_ylim(-1 * limit, limit)
    # zero line, virtual leaf-size lines (thin black) and error limits (red)
    ax.axhline(0, color='k', linewidth=0.5)
    ax.axhline(virtLeafSize, color='k', linewidth=0.2)
    ax.axhline(-1 * virtLeafSize, color='k', linewidth=0.2)
    ax.axhline(error + virtLeafSize, color='r', linewidth=0.5)
    ax.axhline(-1 * (error + virtLeafSize), color='r', linewidth=0.5)

    ax.set_xticks([1, 10, 30, 50, 60])
    ax.set_axisbelow(True)
    ax.grid(True)

    plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)

    return self.getPlot()
[ "def", "FWHM_plot_error", "(", "self", ",", "data", ",", "size", ":", "dict", "=", "{", "}", ",", "plotTitle", ":", "str", "=", "\"\"", ",", "leaf_from", ":", "int", "=", "1", ")", ":", "virtLeafSize", "=", "2.5", "error", "=", "1.5", "limit", "=", "2", "*", "(", "error", "+", "virtLeafSize", ")", "# Chart Titel wenn nicht angegeben", "if", "plotTitle", "==", "\"\"", ":", "plotTitle", "=", "\"lfd:{lfd:d} G:{gantry:01.1f} K:{collimator:01.1f}\"", "fig", ",", "ax", "=", "self", ".", "initPlot", "(", "size", ",", "False", ",", "nrows", "=", "1", ",", "ncols", "=", "1", ")", "ax", ".", "set_title", "(", "plotTitle", ".", "format", "(", "*", "*", "data", ",", "position", "=", "(", "0.5", ",", "1.05", ")", ")", ")", "plot", "=", "{", "\"num\"", ":", "[", "]", ",", "\"x1\"", ":", "[", "]", ",", "\"x2\"", ":", "[", "]", ",", "}", "positions", "=", "[", "]", "leaf", "=", "leaf_from", "for", "k", ",", "v", "in", "data", "[", "'fwxm.data'", "]", ".", "items", "(", ")", ":", "shift", "=", "data", "[", "'shift.data'", "]", "[", "k", "]", "plot", "[", "\"num\"", "]", ".", "append", "(", "leaf", ")", "# position und shift", "positions", ".", "append", "(", "k", ")", "v", "=", "(", "(", "v", "-", "50", ")", "/", "2", ")", "+", "virtLeafSize", "plot", "[", "\"x1\"", "]", ".", "append", "(", "v", "+", "shift", ")", "plot", "[", "\"x2\"", "]", ".", "append", "(", "-", "1", "*", "v", "+", "shift", ")", "# nächster leaf", "leaf", "+=", "1", "# x1 und x2 plotten beide in blue", "ax", ".", "bar", "(", "plot", "[", "\"num\"", "]", ",", "plot", "[", "\"x1\"", "]", ",", "color", "=", "\"#0343dfCC\"", ",", "linewidth", "=", "1", ")", "ax", ".", "bar", "(", "plot", "[", "\"num\"", "]", ",", "plot", "[", "\"x2\"", "]", ",", "color", "=", "\"#0343dfCC\"", ",", "linewidth", "=", "1", ")", "ax", ".", "set_ylim", "(", "-", "1", "*", "limit", ",", "limit", ")", "ax", ".", "axhline", "(", "0", ",", "color", "=", "'k'", ",", "linewidth", "=", "0.5", ")", "ax", ".", "axhline", "(", "virtLeafSize", 
",", "color", "=", "'k'", ",", "linewidth", "=", "0.2", ")", "ax", ".", "axhline", "(", "-", "1", "*", "virtLeafSize", ",", "color", "=", "'k'", ",", "linewidth", "=", "0.2", ")", "ax", ".", "axhline", "(", "error", "+", "virtLeafSize", ",", "color", "=", "'r'", ",", "linewidth", "=", "0.5", ")", "ax", ".", "axhline", "(", "-", "1", "*", "(", "error", "+", "virtLeafSize", ")", ",", "color", "=", "'r'", ",", "linewidth", "=", "0.5", ")", "ax", ".", "set_xticks", "(", "[", "1", ",", "10", ",", "30", ",", "50", ",", "60", "]", ")", "ax", ".", "set_axisbelow", "(", "True", ")", "ax", ".", "grid", "(", "True", ")", "plt", ".", "tight_layout", "(", "pad", "=", "0.4", ",", "w_pad", "=", "0.5", ",", "h_pad", "=", "1.0", ")", "return", "self", ".", "getPlot", "(", ")" ]
[ 332, 4 ]
[ 405, 29 ]
null
python
de
['de', 'de', 'de']
True
true
null
qa_wl.findCenterBall
(self, key)
return self.fields[ key ]["centerBall"]
Zentrum der Kugel im Isozentrum bestimmen Die gefundene Position wird in self.fields eingefügt Zusätzlich wird die Position im centers abgelegt
Zentrum der Kugel im Isozentrum bestimmen Die gefundene Position wird in self.fields eingefügt Zusätzlich wird die Position im centers abgelegt
def findCenterBall(self, key):
    """Determine the center of the ball at the isocenter.

    The found position is inserted into ``self.fields[key]["centerBall"]``.
    Additionally the position is stored in ``self.centers``, grouped by
    the axis (gantry "G", collimator "C", table "T") that is rotated.

    Parameters
    ----------
    key :
        Key of the field in ``self.fields``.

    Returns
    -------
    Point or None
        Ball center relative to ``self.virtualCenter``,
        or ``None`` if ``key`` is unknown.
    """
    if not key in self.fields:
        return None
    info = self.fields[ key ]
    field = qa_field( info )
    # crop the image to the relevant region
    imageArray = field.image.cropField( self.roi )
    # invert imageArray
    img_inv = -imageArray + imageArray.max() + imageArray.min()
    # centerBall
    #
    labeled_foreground = self._getThresholdMask( img_inv, [99, 99.9] )
    properties = measure.regionprops( labeled_foreground )
    # the found ball center in dots
    # several objects could be present,
    # but our images contain only one, so use the first
    centerBallDots = Point( properties[0].centroid[1], properties[0].centroid[0] )
    centerBallPositon = field.image.dots2mm( centerBallDots )
    # FIXME: is subtracting the center OK?
    # Point has no +, so handle x and y separately
    self.fields[ key ]["centerBall"] = Point(centerBallPositon.x - self.virtualCenter.x, centerBallPositon.y - self.virtualCenter.y )
    # store the found position in centers
    centerBall = self.fields[ key ]["centerBall"]
    self.centers["all"][key] = centerBall
    #print( info["gantry"], info["collimator"], info["table"] )
    g = float(info["gantry"])
    c = float(info["collimator"])
    t = float(info["table"])
    if g == 0 and c == 0 and t == 0:
        # reference position: contributes to all three axes
        self.centers["G"][0.0] = centerBall
        self._mergeArray("G", imageArray )
        self.centers["C"][0.0] = centerBall
        self._mergeArray("C", imageArray )
        self.centers["T"][0.0] = centerBall
        self._mergeArray("T", imageArray )
    elif c == 0.0 and t == 0:
        # gantry rotation; angles > 180 (or extended rotation) mapped to negative
        if g > 180 or info["GantryRtnExt"]=="EN":
            self.centers["G"][ g - 360 ] = centerBall
        else:
            self.centers["G"][ g ] = centerBall
        self._mergeArray("G", imageArray )
    elif g == 0 and t == 0:
        # collimator rotation
        if c > 180:
            self.centers["C"][ c - 360 ] = centerBall
        else:
            self.centers["C"][ c ] = centerBall
        self._mergeArray("C", imageArray )
    elif g == 0 and c == 0:
        # table rotation
        if t > 180:
            self.centers["T"][ t - 360 ] = centerBall
        else:
            self.centers["T"][ t ] = centerBall
        self._mergeArray("T", imageArray )
    if self.debug:
        print("findCenterBall", self.virtualCenter, centerBallPositon, centerBall)
        # plot images for debugging
        plots = { 'Original': imageArray, 'Labels': labeled_foreground }
        fig, ax = plt.subplots(1, len(plots))
        for n, (title, img) in enumerate(plots.items()):
            cmap = plt.cm.gnuplot if n == len(plots) - 1 else plt.cm.gray
            ax[n].imshow(img, cmap=cmap)
            ax[n].axis('off')
            ax[n].set_title(title)
            ax[n].plot( centerBallDots.x, centerBallDots.y, 'r+', ms=80, markeredgewidth=1 )
            ax[n].plot( len(img)/2, len(img)/2, 'y+', ms=100, markeredgewidth=1 )
        plt.show(fig)
        pass
    return self.fields[ key ]["centerBall"]
[ "def", "findCenterBall", "(", "self", ",", "key", ")", ":", "if", "not", "key", "in", "self", ".", "fields", ":", "return", "None", "info", "=", "self", ".", "fields", "[", "key", "]", "field", "=", "qa_field", "(", "info", ")", "# das image auf den wichtigen Bereich beschneiden", "imageArray", "=", "field", ".", "image", ".", "cropField", "(", "self", ".", "roi", ")", "# imageArray invertieren", "img_inv", "=", "-", "imageArray", "+", "imageArray", ".", "max", "(", ")", "+", "imageArray", ".", "min", "(", ")", "# centerBall", "#", "labeled_foreground", "=", "self", ".", "_getThresholdMask", "(", "img_inv", ",", "[", "99", ",", "99.9", "]", ")", "properties", "=", "measure", ".", "regionprops", "(", "labeled_foreground", ")", "# das gefundene Zentrum der Kugel in dots", "# es könnten mehrere objekte vorhanden sein", "# in unseren Bildern ist aber nur eins deshalb das erste verwenden", "centerBallDots", "=", "Point", "(", "properties", "[", "0", "]", ".", "centroid", "[", "1", "]", ",", "properties", "[", "0", "]", ".", "centroid", "[", "0", "]", ")", "centerBallPositon", "=", "field", ".", "image", ".", "dots2mm", "(", "centerBallDots", ")", "# FIXME: ist center abziehen OK?", "# da Point kein + kann x und y seperat ", "self", ".", "fields", "[", "key", "]", "[", "\"centerBall\"", "]", "=", "Point", "(", "centerBallPositon", ".", "x", "-", "self", ".", "virtualCenter", ".", "x", ",", "centerBallPositon", ".", "y", "-", "self", ".", "virtualCenter", ".", "y", ")", "# gefundene Position im centers ablegen", "centerBall", "=", "self", ".", "fields", "[", "key", "]", "[", "\"centerBall\"", "]", "self", ".", "centers", "[", "\"all\"", "]", "[", "key", "]", "=", "centerBall", "#print( info[\"gantry\"], info[\"collimator\"], info[\"table\"] )", "g", "=", "float", "(", "info", "[", "\"gantry\"", "]", ")", "c", "=", "float", "(", "info", "[", "\"collimator\"", "]", ")", "t", "=", "float", "(", "info", "[", "\"table\"", "]", ")", "if", "g", "==", "0", "and", "c", "==", 
"0", "and", "t", "==", "0", ":", "self", ".", "centers", "[", "\"G\"", "]", "[", "0.0", "]", "=", "centerBall", "self", ".", "_mergeArray", "(", "\"G\"", ",", "imageArray", ")", "self", ".", "centers", "[", "\"C\"", "]", "[", "0.0", "]", "=", "centerBall", "self", ".", "_mergeArray", "(", "\"C\"", ",", "imageArray", ")", "self", ".", "centers", "[", "\"T\"", "]", "[", "0.0", "]", "=", "centerBall", "self", ".", "_mergeArray", "(", "\"T\"", ",", "imageArray", ")", "elif", "c", "==", "0.0", "and", "t", "==", "0", ":", "if", "g", ">", "180", "or", "info", "[", "\"GantryRtnExt\"", "]", "==", "\"EN\"", ":", "self", ".", "centers", "[", "\"G\"", "]", "[", "g", "-", "360", "]", "=", "centerBall", "else", ":", "self", ".", "centers", "[", "\"G\"", "]", "[", "g", "]", "=", "centerBall", "self", ".", "_mergeArray", "(", "\"G\"", ",", "imageArray", ")", "elif", "g", "==", "0", "and", "t", "==", "0", ":", "if", "c", ">", "180", ":", "self", ".", "centers", "[", "\"C\"", "]", "[", "c", "-", "360", "]", "=", "centerBall", "else", ":", "self", ".", "centers", "[", "\"C\"", "]", "[", "c", "]", "=", "centerBall", "self", ".", "_mergeArray", "(", "\"C\"", ",", "imageArray", ")", "elif", "g", "==", "0", "and", "c", "==", "0", ":", "if", "t", ">", "180", ":", "self", ".", "centers", "[", "\"T\"", "]", "[", "t", "-", "360", "]", "=", "centerBall", "else", ":", "self", ".", "centers", "[", "\"T\"", "]", "[", "t", "]", "=", "centerBall", "self", ".", "_mergeArray", "(", "\"T\"", ",", "imageArray", ")", "if", "self", ".", "debug", ":", "print", "(", "\"findCenterBall\"", ",", "self", ".", "virtualCenter", ",", "centerBallPositon", ",", "centerBall", ")", "# plot images zum debuggen", "plots", "=", "{", "'Original'", ":", "imageArray", ",", "'Labels'", ":", "labeled_foreground", "}", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "1", ",", "len", "(", "plots", ")", ")", "for", "n", ",", "(", "title", ",", "img", ")", "in", "enumerate", "(", "plots", ".", "items", "(", ")", ")", ":", 
"cmap", "=", "plt", ".", "cm", ".", "gnuplot", "if", "n", "==", "len", "(", "plots", ")", "-", "1", "else", "plt", ".", "cm", ".", "gray", "ax", "[", "n", "]", ".", "imshow", "(", "img", ",", "cmap", "=", "cmap", ")", "ax", "[", "n", "]", ".", "axis", "(", "'off'", ")", "ax", "[", "n", "]", ".", "set_title", "(", "title", ")", "ax", "[", "n", "]", ".", "plot", "(", "centerBallDots", ".", "x", ",", "centerBallDots", ".", "y", ",", "'r+'", ",", "ms", "=", "80", ",", "markeredgewidth", "=", "1", ")", "ax", "[", "n", "]", ".", "plot", "(", "len", "(", "img", ")", "/", "2", ",", "len", "(", "img", ")", "/", "2", ",", "'y+'", ",", "ms", "=", "100", ",", "markeredgewidth", "=", "1", ")", "plt", ".", "show", "(", "fig", ")", "pass", "return", "self", ".", "fields", "[", "key", "]", "[", "\"centerBall\"", "]" ]
[ 357, 4 ]
[ 450, 47 ]
null
python
de
['de', 'de', 'de']
True
true
null
random_text
(values, data)
Sucht aus mehreren Strings (Array in pattern) zufällig einen aus. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Sucht aus mehreren Strings (Array in pattern) zufällig einen aus.
def random_text(values, data):
    """Randomly pick one of several strings (array in pattern).

    :param values: values from the JSON file
    :param data: data from the API
    """
    patterns = values["pattern"]
    if len(patterns) == 1:
        # a single entry needs no random draw
        chosen = patterns[0]
    else:
        # randint is inclusive on both ends
        chosen = patterns[randint(0, len(patterns) - 1)]
    return data.format(chosen, values), True
[ "def", "random_text", "(", "values", ",", "data", ")", ":", "len_pattern", "=", "len", "(", "values", "[", "\"pattern\"", "]", ")", "if", "len_pattern", "==", "1", ":", "return", "data", ".", "format", "(", "values", "[", "\"pattern\"", "]", "[", "0", "]", ",", "values", ")", ",", "True", "else", ":", "rand", "=", "randint", "(", "0", ",", "len_pattern", "-", "1", ")", "return", "data", ".", "format", "(", "values", "[", "\"pattern\"", "]", "[", "rand", "]", ",", "values", ")", ",", "True" ]
[ 89, 0 ]
[ 100, 65 ]
null
python
de
['de', 'de', 'de']
True
true
null
SignalingGame
()
Eine populationsdynamische Simulation des 'demand game'.
Eine populationsdynamische Simulation des 'demand game'.
def SignalingGame():
    """A population-dynamics simulation of the 'demand game'."""
    # Open a window for graphics output.
    display = GfxDriver.Window(title="Sender - Receiver Game")

    # Generate a dynamics function from the payoff table.
    dynamics = Dynamics.GenDynamicsFunction(payoff_table, e=0.0, noise=0.0)

    # Set up the Cartesian graph used to plot the population dynamics.
    chart = Graph.Cartesian(display, 0., 0., 10., 1.,
                            "Sender - Receiver Game",
                            "generations", "population share")
    for player in PlayerNames:
        chart.addPen(player)

    # Calculate the population dynamics and plot each generation.
    shares = Dynamics.RandomDistribution(len(PlayerNames))
    for generation in range(51):
        for idx, player in enumerate(PlayerNames):
            chart.addValue(player, generation, shares[idx])
        shares = dynamics(shares)
        if generation % 10 == 0:
            display.refresh()

    # Wait until the user closes the window.
    display.waitUntilClosed()
[ "def", "SignalingGame", "(", ")", ":", "# Open a window for graphics output.", "gfx", "=", "GfxDriver", ".", "Window", "(", "title", "=", "\"Sender - Receiver Game\"", ")", "# Generate a dynamics function from the payoff table.", "dynFunc", "=", "Dynamics", ".", "GenDynamicsFunction", "(", "payoff_table", ",", "e", "=", "0.0", ",", "noise", "=", "0.0", ")", "# Set the graph for plotting the plotting dynamics.", "graph", "=", "Graph", ".", "Cartesian", "(", "gfx", ",", "0.", ",", "0.", ",", "10.", ",", "1.", ",", "\"Sender - Receiver Game\"", ",", "\"generations\"", ",", "\"population share\"", ")", "for", "name", "in", "PlayerNames", ":", "graph", ".", "addPen", "(", "name", ")", "# Calculate the population dynamics and plot the graph.", "population", "=", "Dynamics", ".", "RandomDistribution", "(", "len", "(", "PlayerNames", ")", ")", "for", "g", "in", "range", "(", "51", ")", ":", "for", "i", "in", "range", "(", "len", "(", "PlayerNames", ")", ")", ":", "graph", ".", "addValue", "(", "PlayerNames", "[", "i", "]", ",", "g", ",", "population", "[", "i", "]", ")", "population", "=", "dynFunc", "(", "population", ")", "if", "g", "%", "10", "==", "0", ":", "gfx", ".", "refresh", "(", ")", "# Wait until the user closes the window.", "gfx", ".", "waitUntilClosed", "(", ")" ]
[ 79, 0 ]
[ 108, 25 ]
null
python
de
['de', 'de', 'de']
True
true
null
HeistSystem.DB_dropTable_Targets
(self)
return True
Targets Tabelle löschen
Targets Tabelle löschen
def DB_dropTable_Targets(self):
    ''' Drop the targets table.

    Returns True on success, False when the DROP statement failed.
    '''
    thisActionName = "DB_dropTable_Targets"
    try:
        # remove the existing table and persist the change
        self.GameDB.execute("DROP TABLE game_heist_targets")
        self.GameDB.commit()
        return True
    except Exception as e:
        # record the failure in the log file (messages kept verbatim)
        self.Logger.WriteLog( " --- FEHLER - {0} ---".format(thisActionName))
        self.Logger.WriteLog( " --- EXCEPTION: {0}".format(str(sys.exc_info())))
        return False
[ "def", "DB_dropTable_Targets", "(", "self", ")", ":", "thisActionName", "=", "\"DB_dropTable_Targets\"", "# Bestehende Tabelle löschen\r", "sql", "=", "\"DROP TABLE game_heist_targets\"", "try", ":", "self", ".", "GameDB", ".", "execute", "(", "sql", ")", "self", ".", "GameDB", ".", "commit", "(", ")", "except", "Exception", "as", "e", ":", "# Fehler in Log-Datei schreiben\r", "self", ".", "Logger", ".", "WriteLog", "(", "\" --- FEHLER - {0} ---\"", ".", "format", "(", "thisActionName", ")", ")", "self", ".", "Logger", ".", "WriteLog", "(", "\" --- EXCEPTION: {0}\"", ".", "format", "(", "str", "(", "sys", ".", "exc_info", "(", ")", ")", ")", ")", "return", "False", "return", "True" ]
[ 785, 4 ]
[ 806, 19 ]
null
python
de
['de', 'de', 'de']
True
true
null
MQTTclass.warning
(self, msg)
Warnung über looging oder print. logging.warning nur wenn self.logging=True Wenn MQTT handler hat dann dorthin sonst auf die console Parameters ---------- msg : str Auszugebende Nachricht Returns ------- None.
Warnung über looging oder print.
def warning(self, msg):
    """Emit a warning via logging or print.

    Used only when ``self.logging`` is enabled. If the "MQTT" logger has
    handlers the message goes there, otherwise it is printed to the console.

    Parameters
    ----------
    msg : str
        Message to emit.

    Returns
    -------
    None.
    """
    # idiom fix: truthiness instead of "== True"
    # (assumes self.logging is a bool flag, as the original comparison implies)
    if self.logging:
        logger = logging.getLogger("MQTT")
        if logger.hasHandlers():
            logger.warning(msg)
        else:
            print("### MQTT WARNING", msg)
[ "def", "warning", "(", "self", ",", "msg", ")", ":", "if", "self", ".", "logging", "==", "True", ":", "logger", "=", "logging", ".", "getLogger", "(", "\"MQTT\"", ")", "if", "logger", ".", "hasHandlers", "(", ")", ":", "logger", ".", "warning", "(", "msg", ")", "else", ":", "print", "(", "\"### MQTT WARNING\"", ",", "msg", ")" ]
[ 250, 4 ]
[ 272, 46 ]
null
python
de
['de', 'de', 'de']
True
true
null
HeistSystem.chat_WriteTextMessage
(self, messageText)
return
Schreibt eine Nachricht in den Chat
Schreibt eine Nachricht in den Chat
def chat_WriteTextMessage(self, messageText):
    ''' Write a message to the chat. '''
    thisActionName = "chat_WriteTextMessage"

    # build the chat line and forward it to the stream
    message = "/me : {messagetext}".format(messagetext=str(messageText))
    self.Parent.SendStreamMessage(message)
    return
[ "def", "chat_WriteTextMessage", "(", "self", ",", "messageText", ")", ":", "thisActionName", "=", "\"chat_WriteTextMessage\"", "# Text in den Chat schreiben\r", "self", ".", "Parent", ".", "SendStreamMessage", "(", "\"/me : {messagetext}\"", ".", "format", "(", "messagetext", "=", "str", "(", "messageText", ")", ")", ")", "return" ]
[ 1027, 4 ]
[ 1038, 14 ]
null
python
de
['de', 'de', 'de']
True
true
null
update_data
(attrname, old, new)
Update der Daten sowie der Beschriftungen
Update der Daten sowie der Beschriftungen
def update_data(attrname, old, new):
    """Update the data as well as the axis labels."""
    # scatter plot
    x_name = select_x.value
    y_name = select_y.value
    scatter.xaxis.axis_label = x_name
    scatter.yaxis.axis_label = y_name
    source_scatter.data = dict(x=tips[x_name], y=tips[y_name])

    # bar chart
    cat_name = select_cat.value
    counts = tips[cat_name].value_counts()
    bar.x_range.factors = list(counts.index)
    source_kat.data = dict(x=list(counts.index), top=counts.values)
    bar.xaxis.axis_label = cat_name

    # histogram
    hist_name = select_hist.value
    heights, edges = np.histogram(tips[hist_name])
    source_hist.data = dict(x=edges[:-1], top=heights)
    hist.xaxis.axis_label = hist_name
[ "def", "update_data", "(", "attrname", ",", "old", ",", "new", ")", ":", "# Scatter Diagramm", "scatter", ".", "xaxis", ".", "axis_label", "=", "select_x", ".", "value", "scatter", ".", "yaxis", ".", "axis_label", "=", "select_y", ".", "value", "x", "=", "select_x", ".", "value", "y", "=", "select_y", ".", "value", "source_scatter", ".", "data", "=", "dict", "(", "x", "=", "tips", "[", "x", "]", ",", "y", "=", "tips", "[", "y", "]", ")", "# Säulendiagramm", "data_cat", "=", "tips", "[", "select_cat", ".", "value", "]", "summary", "=", "data_cat", ".", "value_counts", "(", ")", "bar", ".", "x_range", ".", "factors", "=", "list", "(", "summary", ".", "index", ")", "source_kat", ".", "data", "=", "dict", "(", "x", "=", "list", "(", "summary", ".", "index", ")", ",", "top", "=", "summary", ".", "values", ")", "bar", ".", "xaxis", ".", "axis_label", "=", "select_cat", ".", "value", "# Historamm", "data_hist", "=", "tips", "[", "select_hist", ".", "value", "]", "top_hist_new", ",", "x_hist_new", "=", "np", ".", "histogram", "(", "data_hist", ")", "source_hist", ".", "data", "=", "dict", "(", "x", "=", "x_hist_new", "[", ":", "-", "1", "]", ",", "top", "=", "top_hist_new", ")", "hist", ".", "xaxis", ".", "axis_label", "=", "select_hist", ".", "value" ]
[ 66, 0 ]
[ 84, 45 ]
null
python
de
['de', 'de', 'de']
True
true
null
transform
(values: dict, data: StepData)
Führt die unter `"type"` angegebene transform-Funktion als Schleife aus. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return:
Führt die unter `"type"` angegebene transform-Funktion als Schleife aus.
def transform(values: dict, data: StepData):
    """Run the transform function named under `"type"` as a loop.

    :param values: values from the JSON file
    :param data: data from the API
    :return:
    """
    # every step shares the same loop-state dict from the parent
    loop_states = values.get("_loop_states", {})
    for step in values["transform"]:
        step["_loop_states"] = loop_states
        step_func = get_type_func(step, TRANSFORM_TYPES)
        step_func(step, data)
[ "def", "transform", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "transformation", "in", "values", "[", "\"transform\"", "]", ":", "transformation", "[", "\"_loop_states\"", "]", "=", "values", ".", "get", "(", "\"_loop_states\"", ",", "{", "}", ")", "trans_func", "=", "get_type_func", "(", "transformation", ",", "TRANSFORM_TYPES", ")", "trans_func", "(", "transformation", ",", "data", ")" ]
[ 26, 0 ]
[ 38, 40 ]
null
python
de
['de', 'de', 'de']
True
true
null
get_temp_path
(path: str)
return get_resource_path(os.path.join(TEMP_LOCATION, path))
Erstellt einen absoluten Pfad zu der übergebenen Temp-Ressource. Erstellt den Pfad aus `RESOURCES_LOCATION`, `TEMP_LOCATION` und dem übergebenen Pfad. :param path: Pfad zur Ressource, relativ zum `resources/temp`-Ordner. :return: Absoluter Pfad zur übergebenen Ressource.
Erstellt einen absoluten Pfad zu der übergebenen Temp-Ressource.
def get_temp_path(path: str):
    """Build an absolute path to the given temp resource.

    Builds the path from `RESOURCES_LOCATION`, `TEMP_LOCATION` and the
    given path.

    :param path: path to the resource, relative to the `resources/temp` folder.
    :return: absolute path to the given resource.
    """
    relative = os.path.join(TEMP_LOCATION, path)
    return get_resource_path(relative)
[ "def", "get_temp_path", "(", "path", ":", "str", ")", ":", "return", "get_resource_path", "(", "os", ".", "path", ".", "join", "(", "TEMP_LOCATION", ",", "path", ")", ")" ]
[ 109, 0 ]
[ 117, 63 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispSAFRS.appDialog
(cls, title:str="", detail:dict={})
App-Dialog Informationen für den client anfügen. Diese Informationen führen zu einer Dialog Anzeige im client:: appDialog("Fehler beim anlegen", { "content" : message, "dimensions" : [ 500, 200] }) Übergabe für den client Dialog { "title" => "Fehler beim anlegen", "content" : message, "dimensions" : [ 500, 200] } Parameters ---------- title : str, optional title Bereich. The default is "". detail : str, optional Info Bereich. The default is "". ist title hier nicht angegeben wird message verwendet Returns ------- None.
App-Dialog Informationen für den client anfügen.
def appDialog(cls, title: str = "", detail: dict = None):
    """Attach app-dialog information for the client.

    This information triggers a dialog display in the client::

        appDialog("Fehler beim anlegen", { "content" : message, "dimensions" : [ 500, 200] })

    Payload passed on for the client dialog::

        {
            "title" => "Fehler beim anlegen",
            "content" : message,
            "dimensions" : [ 500, 200]
        }

    Parameters
    ----------
    title : str, optional
        Title area. The default is "".
    detail : dict, optional
        Info area. The default is None (treated as {}).
        If no "title" key is given here, the title argument is used.

    Returns
    -------
    None.
    """
    # Bug fix: the old mutable default ({}) was mutated below, so the title
    # from one call leaked into every later call that omitted `detail`.
    if detail is None:
        detail = {}
    if "title" not in detail:
        detail["title"] = title
    cls._int_add_meta("App-Dialog", title, detail)
[ "def", "appDialog", "(", "cls", ",", "title", ":", "str", "=", "\"\"", ",", "detail", ":", "dict", "=", "{", "}", ")", ":", "if", "not", "\"title\"", "in", "detail", ":", "detail", "[", "\"title\"", "]", "=", "title", "cls", ".", "_int_add_meta", "(", "\"App-Dialog\"", ",", "title", ",", "detail", ")" ]
[ 895, 4 ]
[ 925, 56 ]
null
python
de
['de', 'de', 'de']
True
true
null
calculate_add
(values: dict, data: StepData)
Die jeweiligen Werte, die in add stehen, werden zu den Werten, die in key stehen, hinzuaddiert. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Die jeweiligen Werte, die in add stehen, werden zu den Werten, die in key stehen, hinzuaddiert.
def calculate_add(values: dict, data: StepData): """Die jeweiligen Werte, die in add stehen, werden zu den Werten, die in key stehen, hinzuaddiert. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ _bi_calculate(values, data, operator.add)
[ "def", "calculate_add", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "_bi_calculate", "(", "values", ",", "data", ",", "operator", ".", "add", ")" ]
[ 249, 0 ]
[ 255, 45 ]
null
python
de
['de', 'de', 'de']
True
true
null
Pipeline.config
(self)
return self.__config
float: config der pipeline. Wird erst nach Beendigung der Pipeline initialisiert.
float: config der pipeline. Wird erst nach Beendigung der Pipeline initialisiert.
def config(self): """float: config der pipeline. Wird erst nach Beendigung der Pipeline initialisiert.""" return self.__config
[ "def", "config", "(", "self", ")", ":", "return", "self", ".", "__config" ]
[ 78, 4 ]
[ 80, 28 ]
null
python
de
['de', 'de', 'de']
True
true
null
successively
(values: dict, step_data: StepData, out_images, out_audios, out_audio_l)
Generierung des Output-Videos aus allen Bild- und Audiodateien. Generiert das Output-Video. Dazu werden alle Bild- und alle Audiodateien - in der Reihenfolge wie sie in values (in der JSON) vorliegen - aneinandergereiht. :param values: Werte aus der JSON-Datei :param step_data: Daten aus der API :return: Pfad zum Output-Video :rtype: str
Generierung des Output-Videos aus allen Bild- und Audiodateien.
def successively(values: dict, step_data: StepData, out_images, out_audios, out_audio_l): """Generierung des Output-Videos aus allen Bild- und Audiodateien. Generiert das Output-Video. Dazu werden alle Bild- und alle Audiodateien - in der Reihenfolge wie sie in values (in der JSON) vorliegen - aneinandergereiht. :param values: Werte aus der JSON-Datei :param step_data: Daten aus der API :return: Pfad zum Output-Video :rtype: str """ for image in values["images"]: out_images.append(values["images"][image]) for audio in values["audio"]["audios"]: out_audios.append(values["audio"]["audios"][audio]) out_audio_l.append(MP3(values["audio"]["audios"][audio]).info.length)
[ "def", "successively", "(", "values", ":", "dict", ",", "step_data", ":", "StepData", ",", "out_images", ",", "out_audios", ",", "out_audio_l", ")", ":", "for", "image", "in", "values", "[", "\"images\"", "]", ":", "out_images", ".", "append", "(", "values", "[", "\"images\"", "]", "[", "image", "]", ")", "for", "audio", "in", "values", "[", "\"audio\"", "]", "[", "\"audios\"", "]", ":", "out_audios", ".", "append", "(", "values", "[", "\"audio\"", "]", "[", "\"audios\"", "]", "[", "audio", "]", ")", "out_audio_l", ".", "append", "(", "MP3", "(", "values", "[", "\"audio\"", "]", "[", "\"audios\"", "]", "[", "audio", "]", ")", ".", "info", ".", "length", ")" ]
[ 91, 0 ]
[ 106, 77 ]
null
python
de
['de', 'de', 'de']
True
true
null
register_calculate
(func)
return func
Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Action-Funktion dem Dictionary CALCULATE_ACTIONS hinzu. :param func: die zu registrierende Funktion :return: Funktion mit try/except-Block
Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Action-Funktion dem Dictionary CALCULATE_ACTIONS hinzu.
def register_calculate(func): """Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Action-Funktion dem Dictionary CALCULATE_ACTIONS hinzu. :param func: die zu registrierende Funktion :return: Funktion mit try/except-Block """ CALCULATE_ACTIONS[func.__name__.replace("calculate_", "")] = func return func
[ "def", "register_calculate", "(", "func", ")", ":", "CALCULATE_ACTIONS", "[", "func", ".", "__name__", ".", "replace", "(", "\"calculate_\"", ",", "\"\"", ")", "]", "=", "func", "return", "func" ]
[ 17, 0 ]
[ 25, 15 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispBaseWebApp.routeIFrame
( self, src:str="" )
return '<div class="iframe-container overflow-hidden flex-1"><iframe src="{}" ></iframe></div>'.format( src )
Filepath in iframe anzeigen. Aufrufe:: /apiframe - api im iframe anzeigen. Mit src="/api" /dbadminframe - dbadmin im iframe anzeigen. Mit src="/dbadmin" Parameters ---------- src : str, optional src Angabe des iframe. The default is "". Returns ------- str div mit iframe
Filepath in iframe anzeigen.
def routeIFrame( self, src:str="" ): """Filepath in iframe anzeigen. Aufrufe:: /apiframe - api im iframe anzeigen. Mit src="/api" /dbadminframe - dbadmin im iframe anzeigen. Mit src="/dbadmin" Parameters ---------- src : str, optional src Angabe des iframe. The default is "". Returns ------- str div mit iframe """ return '<div class="iframe-container overflow-hidden flex-1"><iframe src="{}" ></iframe></div>'.format( src )
[ "def", "routeIFrame", "(", "self", ",", "src", ":", "str", "=", "\"\"", ")", ":", "return", "'<div class=\"iframe-container overflow-hidden flex-1\"><iframe src=\"{}\" ></iframe></div>'", ".", "format", "(", "src", ")" ]
[ 737, 4 ]
[ 756, 117 ]
null
python
de
['de', 'de', 'de']
True
true
null
Pipeline.start_time
(self)
return self.__start_time
float: Startzeit der Pipeline. Wird erst bei dem Aufruf von :func:`start` initialisiert.
float: Startzeit der Pipeline. Wird erst bei dem Aufruf von :func:`start` initialisiert.
def start_time(self): """float: Startzeit der Pipeline. Wird erst bei dem Aufruf von :func:`start` initialisiert.""" return self.__start_time
[ "def", "start_time", "(", "self", ")", ":", "return", "self", ".", "__start_time" ]
[ 68, 4 ]
[ 70, 32 ]
null
python
de
['de', 'de', 'de']
True
true
null
check_key
(values: dict, data: StepData)
Überprüft, ob ein Key vorhanden ist und setzt den dazugehörigen `key` bzw. den `new_keys` auf `true`. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return:
Überprüft, ob ein Key vorhanden ist und setzt den dazugehörigen `key` bzw. den `new_keys` auf `true`.
def check_key(values: dict, data: StepData): """Überprüft, ob ein Key vorhanden ist und setzt den dazugehörigen `key` bzw. den `new_keys` auf `true`. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return: """ for idx, key in enumerate(values["keys"]): try: data.get_data(key, values) value = True except StepKeyError: if "init_with" in values: init = data.deep_format(values["init_with"], values=values) data.insert_data(key, init, values) value = False if "new_keys" in values: data.insert_data(values["new_keys"][idx], value, values)
[ "def", "check_key", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "enumerate", "(", "values", "[", "\"keys\"", "]", ")", ":", "try", ":", "data", ".", "get_data", "(", "key", ",", "values", ")", "value", "=", "True", "except", "StepKeyError", ":", "if", "\"init_with\"", "in", "values", ":", "init", "=", "data", ".", "deep_format", "(", "values", "[", "\"init_with\"", "]", ",", "values", "=", "values", ")", "data", ".", "insert_data", "(", "key", ",", "init", ",", "values", ")", "value", "=", "False", "if", "\"new_keys\"", "in", "values", ":", "data", ".", "insert_data", "(", "values", "[", "\"new_keys\"", "]", "[", "idx", "]", ",", "value", ",", "values", ")" ]
[ 770, 0 ]
[ 788, 68 ]
null
python
de
['de', 'de', 'de']
True
true
null
load_db
()
return client[setup.MONGO_DB_NAME]
Erhalte ein DB Objekt
Erhalte ein DB Objekt
def load_db(): """Erhalte ein DB Objekt""" print('Connect to MongoDB...') client = MongoClient(setup.MONGO_DB_URL) return client[setup.MONGO_DB_NAME]
[ "def", "load_db", "(", ")", ":", "print", "(", "'Connect to MongoDB...'", ")", "client", "=", "MongoClient", "(", "setup", ".", "MONGO_DB_URL", ")", "return", "client", "[", "setup", ".", "MONGO_DB_NAME", "]" ]
[ 20, 0 ]
[ 25, 38 ]
null
python
de
['de', 'de', 'de']
True
true
null
Pipeline.start
(self)
Führt alle Schritte aus, die in der übergebenen Instanz der Klasse :class:`Steps` definiert sind. Initialisiert zuerst einen Pipeline-Ordner mit der Pipeline-ID. Dieser kann dann in der gesamten Pipeline zur Zwichenspeicherung von Dateien verwendet werden. Dieser wird nach Beendigung oder bei einem Fehlerfall wieder gelöscht. Führt alle Schritte aus der übergebenen Steps Instanz, die in der Funktion :func:`sequence` definiert sind, der Reihenfolge nach aus. Mit der Ausnahme von allen Steps mit der id < 0 und >= `step_max`. :return: Wenn ohne Fehler ausgeführt `True`, sonst `False` :rtype: bool
Führt alle Schritte aus, die in der übergebenen Instanz der Klasse :class:`Steps` definiert sind.
def start(self): """Führt alle Schritte aus, die in der übergebenen Instanz der Klasse :class:`Steps` definiert sind. Initialisiert zuerst einen Pipeline-Ordner mit der Pipeline-ID. Dieser kann dann in der gesamten Pipeline zur Zwichenspeicherung von Dateien verwendet werden. Dieser wird nach Beendigung oder bei einem Fehlerfall wieder gelöscht. Führt alle Schritte aus der übergebenen Steps Instanz, die in der Funktion :func:`sequence` definiert sind, der Reihenfolge nach aus. Mit der Ausnahme von allen Steps mit der id < 0 und >= `step_max`. :return: Wenn ohne Fehler ausgeführt `True`, sonst `False` :rtype: bool """ try: self.__setup() data = StepData(self.steps_config, self.id, self.__job_id, self.__config.get("presets", None)) logger.info(f"{self.__log_name} {self.id} started!") for self.__current_step in range(0, self.__steps_max): logger.info(f"Next step: {self.current_step_name()}") # Execute Step self.__steps[self.__current_step].get("call", lambda: None)(self.__config, data) logger.info(f"Step finished: {self.current_step_name()}!") self.__on_completion(self.__config, data) self.__cleanup() return True except (KeyboardInterrupt, SystemExit) as e: self.__error_cleanup(e) raise except Exception as e: self.__error_cleanup(e) if self.__attach_mode: raise return False
[ "def", "start", "(", "self", ")", ":", "try", ":", "self", ".", "__setup", "(", ")", "data", "=", "StepData", "(", "self", ".", "steps_config", ",", "self", ".", "id", ",", "self", ".", "__job_id", ",", "self", ".", "__config", ".", "get", "(", "\"presets\"", ",", "None", ")", ")", "logger", ".", "info", "(", "f\"{self.__log_name} {self.id} started!\"", ")", "for", "self", ".", "__current_step", "in", "range", "(", "0", ",", "self", ".", "__steps_max", ")", ":", "logger", ".", "info", "(", "f\"Next step: {self.current_step_name()}\"", ")", "# Execute Step", "self", ".", "__steps", "[", "self", ".", "__current_step", "]", ".", "get", "(", "\"call\"", ",", "lambda", ":", "None", ")", "(", "self", ".", "__config", ",", "data", ")", "logger", ".", "info", "(", "f\"Step finished: {self.current_step_name()}!\"", ")", "self", ".", "__on_completion", "(", "self", ".", "__config", ",", "data", ")", "self", ".", "__cleanup", "(", ")", "return", "True", "except", "(", "KeyboardInterrupt", ",", "SystemExit", ")", "as", "e", ":", "self", ".", "__error_cleanup", "(", "e", ")", "raise", "except", "Exception", "as", "e", ":", "self", ".", "__error_cleanup", "(", "e", ")", "if", "self", ".", "__attach_mode", ":", "raise", "return", "False" ]
[ 235, 4 ]
[ 274, 24 ]
null
python
de
['de', 'de', 'de']
True
true
null
register_draw
(func)
return register_type_func_no_data(DRAW_TYPES, ImageError, func)
Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Typ-Funktion dem Dictionary DRAW_TYPES hinzu. :param func: die zu registrierende Funktion :return: Funktion mit try/except-Block
Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Typ-Funktion dem Dictionary DRAW_TYPES hinzu.
def register_draw(func): """Registriert die übergebene Funktion und versieht sie mit einem `"try/except"`-Block. Fügt eine Typ-Funktion dem Dictionary DRAW_TYPES hinzu. :param func: die zu registrierende Funktion :return: Funktion mit try/except-Block """ return register_type_func_no_data(DRAW_TYPES, ImageError, func)
[ "def", "register_draw", "(", "func", ")", ":", "return", "register_type_func_no_data", "(", "DRAW_TYPES", ",", "ImageError", ",", "func", ")" ]
[ 14, 0 ]
[ 21, 67 ]
null
python
de
['de', 'de', 'de']
True
true
null
_unknown_handler
(value)
Helfer für json.dmps()) - stammt aus hujson
Helfer für json.dmps()) - stammt aus hujson
def _unknown_handler(value): """Helfer für json.dmps()) - stammt aus hujson""" if isinstance(value, datetime.date): return str(value) elif isinstance(value, datetime.datetime): return value.isoformat() + 'Z' elif isinstance(value, decimal.Decimal): return unicode(value) elif hasattr(value, 'dict_mit_positionen') and callable(value.dict_mit_positionen): # helpful for our internal data-modelling return value.dict_mit_positionen() elif hasattr(value, 'as_dict') and callable(value.as_dict): # helpful for structured.Struct() Objects return value.as_dict() # for Google AppEngine elif hasattr(value, 'properties') and callable(value.properties): properties = value.properties() if isinstance(properties, dict): keys = (key for (key, datatype) in properties.iteritems() if datatype.__class__.__name__ not in ['BlobProperty']) elif isinstance(properties, (set, list)): keys = properties else: return {} return dict((key, getattr(value, key)) for key in keys) elif hasattr(value, 'to_dict') and callable(value.to_dict): # ndb tmp = value.to_dict() if 'id' not in tmp and hasattr(value, 'key') and hasattr(value.key, 'id') and callable(value.key.id): tmp['id'] = value.key.id() return tmp elif hasattr(value, '_to_entity') and callable(value._to_entity): retdict = dict() value._to_entity(retdict) return retdict elif 'google.appengine.api.users.User' in str(type(value)): return "%s/%s" % (value.user_id(), value.email()) elif 'google.appengine.api.datastore_types.Key' in str(type(value)): return str(value) elif 'google.appengine.api.datastore_types.BlobKey' in str(type(value)): return str(value) # for Google AppEngine `ndb` elif (hasattr(value, '_properties') and hasattr(value._properties, 'items') and callable(value._properties.items)): return dict([(k, v._get_value(value)) for k, v in value._properties.items()]) elif hasattr(value, 'urlsafe') and callable(value.urlsafe): return str(value.urlsafe()) #elif hasattr(value, '_get_value') and callable(value._get_value): # retdict = dict() # 
value._get_value(retdict) # return retdict raise TypeError("%s(%s)" % (type(value), value))
[ "def", "_unknown_handler", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "datetime", ".", "date", ")", ":", "return", "str", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "return", "value", ".", "isoformat", "(", ")", "+", "'Z'", "elif", "isinstance", "(", "value", ",", "decimal", ".", "Decimal", ")", ":", "return", "unicode", "(", "value", ")", "elif", "hasattr", "(", "value", ",", "'dict_mit_positionen'", ")", "and", "callable", "(", "value", ".", "dict_mit_positionen", ")", ":", "# helpful for our internal data-modelling", "return", "value", ".", "dict_mit_positionen", "(", ")", "elif", "hasattr", "(", "value", ",", "'as_dict'", ")", "and", "callable", "(", "value", ".", "as_dict", ")", ":", "# helpful for structured.Struct() Objects", "return", "value", ".", "as_dict", "(", ")", "# for Google AppEngine", "elif", "hasattr", "(", "value", ",", "'properties'", ")", "and", "callable", "(", "value", ".", "properties", ")", ":", "properties", "=", "value", ".", "properties", "(", ")", "if", "isinstance", "(", "properties", ",", "dict", ")", ":", "keys", "=", "(", "key", "for", "(", "key", ",", "datatype", ")", "in", "properties", ".", "iteritems", "(", ")", "if", "datatype", ".", "__class__", ".", "__name__", "not", "in", "[", "'BlobProperty'", "]", ")", "elif", "isinstance", "(", "properties", ",", "(", "set", ",", "list", ")", ")", ":", "keys", "=", "properties", "else", ":", "return", "{", "}", "return", "dict", "(", "(", "key", ",", "getattr", "(", "value", ",", "key", ")", ")", "for", "key", "in", "keys", ")", "elif", "hasattr", "(", "value", ",", "'to_dict'", ")", "and", "callable", "(", "value", ".", "to_dict", ")", ":", "# ndb", "tmp", "=", "value", ".", "to_dict", "(", ")", "if", "'id'", "not", "in", "tmp", "and", "hasattr", "(", "value", ",", "'key'", ")", "and", "hasattr", "(", "value", ".", "key", ",", "'id'", ")", "and", "callable", "(", "value", ".", "key", ".", "id", ")", ":", 
"tmp", "[", "'id'", "]", "=", "value", ".", "key", ".", "id", "(", ")", "return", "tmp", "elif", "hasattr", "(", "value", ",", "'_to_entity'", ")", "and", "callable", "(", "value", ".", "_to_entity", ")", ":", "retdict", "=", "dict", "(", ")", "value", ".", "_to_entity", "(", "retdict", ")", "return", "retdict", "elif", "'google.appengine.api.users.User'", "in", "str", "(", "type", "(", "value", ")", ")", ":", "return", "\"%s/%s\"", "%", "(", "value", ".", "user_id", "(", ")", ",", "value", ".", "email", "(", ")", ")", "elif", "'google.appengine.api.datastore_types.Key'", "in", "str", "(", "type", "(", "value", ")", ")", ":", "return", "str", "(", "value", ")", "elif", "'google.appengine.api.datastore_types.BlobKey'", "in", "str", "(", "type", "(", "value", ")", ")", ":", "return", "str", "(", "value", ")", "# for Google AppEngine `ndb`", "elif", "(", "hasattr", "(", "value", ",", "'_properties'", ")", "and", "hasattr", "(", "value", ".", "_properties", ",", "'items'", ")", "and", "callable", "(", "value", ".", "_properties", ".", "items", ")", ")", ":", "return", "dict", "(", "[", "(", "k", ",", "v", ".", "_get_value", "(", "value", ")", ")", "for", "k", ",", "v", "in", "value", ".", "_properties", ".", "items", "(", ")", "]", ")", "elif", "hasattr", "(", "value", ",", "'urlsafe'", ")", "and", "callable", "(", "value", ".", "urlsafe", ")", ":", "return", "str", "(", "value", ".", "urlsafe", "(", ")", ")", "#elif hasattr(value, '_get_value') and callable(value._get_value):", "# retdict = dict()", "# value._get_value(retdict)", "# return retdict", "raise", "TypeError", "(", "\"%s(%s)\"", "%", "(", "type", "(", "value", ")", ",", "value", ")", ")" ]
[ 15, 0 ]
[ 66, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispSAFRS._int_query
( cls, query=None, **kwargs)
return result
Eine query ohne paginate durchführen. Parameters ---------- query : obj Das bisherige query Object **kwargs : dict Beliebige weitere Argumente. verwendet wird type Returns ------- result : dict:: - data - count - meta - errors _asdict ist in row bei der zusätzlichen Columns durch add_columns ohne dies wird die row so verwendet
Eine query ohne paginate durchführen.
def _int_query( cls, query=None, **kwargs): """Eine query ohne paginate durchführen. Parameters ---------- query : obj Das bisherige query Object **kwargs : dict Beliebige weitere Argumente. verwendet wird type Returns ------- result : dict:: - data - count - meta - errors _asdict ist in row bei der zusätzlichen Columns durch add_columns ohne dies wird die row so verwendet """ _type = cls.__name__ if 'type' in kwargs: _type = kwargs['type'] data = [] if query: for row in query: # dies geht nur wenn in row _asdict vorhanden ist (z.B. group) if "_asdict" in dir(row): _row = row._asdict() _id = None if "id" in _row: _id = _row["id"] del _row["id"] # _id = data.append({ "attributes" : _row, "id": _id, "type": _type }) else: data.append( row ) # Anzahl aus query count = len( data ) result = { "data" : data, "count" : count, "meta": {}, "errors": [], } cls.appInfo("sql-lastquery", str( query ) ) return result
[ "def", "_int_query", "(", "cls", ",", "query", "=", "None", ",", "*", "*", "kwargs", ")", ":", "_type", "=", "cls", ".", "__name__", "if", "'type'", "in", "kwargs", ":", "_type", "=", "kwargs", "[", "'type'", "]", "data", "=", "[", "]", "if", "query", ":", "for", "row", "in", "query", ":", "# dies geht nur wenn in row _asdict vorhanden ist (z.B. group)", "if", "\"_asdict\"", "in", "dir", "(", "row", ")", ":", "_row", "=", "row", ".", "_asdict", "(", ")", "_id", "=", "None", "if", "\"id\"", "in", "_row", ":", "_id", "=", "_row", "[", "\"id\"", "]", "del", "_row", "[", "\"id\"", "]", "# _id =", "data", ".", "append", "(", "{", "\"attributes\"", ":", "_row", ",", "\"id\"", ":", "_id", ",", "\"type\"", ":", "_type", "}", ")", "else", ":", "data", ".", "append", "(", "row", ")", "# Anzahl aus query", "count", "=", "len", "(", "data", ")", "result", "=", "{", "\"data\"", ":", "data", ",", "\"count\"", ":", "count", ",", "\"meta\"", ":", "{", "}", ",", "\"errors\"", ":", "[", "]", ",", "}", "cls", ".", "appInfo", "(", "\"sql-lastquery\"", ",", "str", "(", "query", ")", ")", "return", "result" ]
[ 928, 4 ]
[ 985, 21 ]
null
python
de
['de', 'de', 'de']
True
true
null
sort
(values: dict, data: StepData)
Sortiert Wörter nach dem Alphabet oder Zahlen aufsteigend. Ist reverse auf True gesetzt, werden die Wörter zu Z nach A sortiert, bzw. Zahlen absteigend. Achtung: Sortierung von A nach Z ["Argentina", "Canada", "Cyprus", "Germany", "Norway", "Schweden", "USA", "United Kingdom", "Z"] "USA" ist vor "United Kingdom", weil bei "USA" der zweite Buchstabe auch groß geschrieben ist. Würde dort "Usa" statt "USA" stehen, wäre "United Kingdom" vor "USA". :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return:
Sortiert Wörter nach dem Alphabet oder Zahlen aufsteigend.
def sort(values: dict, data: StepData): """Sortiert Wörter nach dem Alphabet oder Zahlen aufsteigend. Ist reverse auf True gesetzt, werden die Wörter zu Z nach A sortiert, bzw. Zahlen absteigend. Achtung: Sortierung von A nach Z ["Argentina", "Canada", "Cyprus", "Germany", "Norway", "Schweden", "USA", "United Kingdom", "Z"] "USA" ist vor "United Kingdom", weil bei "USA" der zweite Buchstabe auch groß geschrieben ist. Würde dort "Usa" statt "USA" stehen, wäre "United Kingdom" vor "USA". :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return: """ for idx, key in data.loop_key(values["keys"], values): new_key = get_new_keys(values, idx) value = data.get_data(key, values) reverse = data.get_data(values.get("reverse", False), values, bool) new_value = sorted(value, reverse=reverse) data.insert_data(new_key, new_value, values)
[ "def", "sort", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "value", "=", "data", ".", "get_data", "(", "key", ",", "values", ")", "reverse", "=", "data", ".", "get_data", "(", "values", ".", "get", "(", "\"reverse\"", ",", "False", ")", ",", "values", ",", "bool", ")", "new_value", "=", "sorted", "(", "value", ",", "reverse", "=", "reverse", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")" ]
[ 519, 0 ]
[ 539, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
timestamp
(values: dict, data: StepData)
Wandelt einen UNIX-Zeitstempel in ein anderes Format um. Wandelt einen UNIX-Zeitstempel in ein anderes Format um, welches unter `"format"` angegeben wird. Ist zeropaded_off true, so wird aus z.B. 05 eine 5. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Wandelt einen UNIX-Zeitstempel in ein anderes Format um.
def timestamp(values: dict, data: StepData): """Wandelt einen UNIX-Zeitstempel in ein anderes Format um. Wandelt einen UNIX-Zeitstempel in ein anderes Format um, welches unter `"format"` angegeben wird. Ist zeropaded_off true, so wird aus z.B. 05 eine 5. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ for idx, key in data.loop_key(values["keys"], values): value = data.get_data(key, values) date = datetime.fromtimestamp(value) new_key = get_new_keys(values, idx) zeropaded_off = data.get_data(values.get("zeropaded_off", False), values, bool) if zeropaded_off: new_value = date.strftime(data.format(values["format"], values)).lstrip("0").replace(" 0", " ") else: new_value = date.strftime(data.format(values["format"], values)) data.insert_data(new_key, new_value, values)
[ "def", "timestamp", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "value", "=", "data", ".", "get_data", "(", "key", ",", "values", ")", "date", "=", "datetime", ".", "fromtimestamp", "(", "value", ")", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "zeropaded_off", "=", "data", ".", "get_data", "(", "values", ".", "get", "(", "\"zeropaded_off\"", ",", "False", ")", ",", "values", ",", "bool", ")", "if", "zeropaded_off", ":", "new_value", "=", "date", ".", "strftime", "(", "data", ".", "format", "(", "values", "[", "\"format\"", "]", ",", "values", ")", ")", ".", "lstrip", "(", "\"0\"", ")", ".", "replace", "(", "\" 0\"", ",", "\" \"", ")", "else", ":", "new_value", "=", "date", ".", "strftime", "(", "data", ".", "format", "(", "values", "[", "\"format\"", "]", ",", "values", ")", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")" ]
[ 302, 0 ]
[ 320, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispTest.test_webapp_db_relation
( self )
Api aufruf für relative Tabellen api/dbtestsrel?filter=eq(dbtests_id,2) [{'attributes': {'dbtests_id': 2, 'rdata': None, 'rdate': None, 'rgroup': 'B', 'rinteger': 12, 'rstring': 'r_zwei'}, 'id': '2', 'links': {'self': 'http://localhost/api/dbtestsrel/2/'}, 'relationships': {'dbtests': {'data': None, 'links': {'self': 'http://localhost/api/dbtestsrel/2/dbtests'}}}, 'type': 'dbtestsrel'}]
Api aufruf für relative Tabellen api/dbtestsrel?filter=eq(dbtests_id,2) [{'attributes': {'dbtests_id': 2, 'rdata': None, 'rdate': None, 'rgroup': 'B', 'rinteger': 12, 'rstring': 'r_zwei'}, 'id': '2', 'links': {'self': 'http://localhost/api/dbtestsrel/2/'}, 'relationships': {'dbtests': {'data': None, 'links': {'self': 'http://localhost/api/dbtestsrel/2/dbtests'}}}, 'type': 'dbtestsrel'}]
def test_webapp_db_relation( self ): ''' Api aufruf für relative Tabellen api/dbtestsrel?filter=eq(dbtests_id,2) [{'attributes': {'dbtests_id': 2, 'rdata': None, 'rdate': None, 'rgroup': 'B', 'rinteger': 12, 'rstring': 'r_zwei'}, 'id': '2', 'links': {'self': 'http://localhost/api/dbtestsrel/2/'}, 'relationships': {'dbtests': {'data': None, 'links': {'self': 'http://localhost/api/dbtestsrel/2/dbtests'}}}, 'type': 'dbtestsrel'}] ''' # zuerst den zugriff testen und prüfen ob die tabelle leer ist # response = self.app.get( "api/dbtests/") self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( len( response.json["data"] ), 5, "keine 5 Datensätze" ) response = self.app.get( "api/dbtestsrel/") self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( len(response.json["data"]), 5, "keine 5 Datensätze" ) # daten über path und filter müssen gleich sein nur die globale links Angabe unterscheidet sich # http://127.0.0.1:5000/api/nutzung?_ispcp={%22_default%22:{%22ersatz_id%22:1754}}&filter=eq(ersatz_id,1754)&page[offset]=0&page[limit]=25 response = self.app.get( "api/dbtests/2/dbtestsrel") self.assertEqual(response.status_code, 200, "Api Status nicht 200") reldata = response.json response = self.app.get( "api/dbtestsrel", query_string={ "filter":"eq(dbtests_id,2)" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( reldata["data"], response.json["data"], "Rückgaben sind nicht gleich" )
[ "def", "test_webapp_db_relation", "(", "self", ")", ":", "# zuerst den zugriff testen und prüfen ob die tabelle leer ist", "#", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dbtests/\"", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "len", "(", "response", ".", "json", "[", "\"data\"", "]", ")", ",", "5", ",", "\"keine 5 Datensätze\"", ")", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dbtestsrel/\"", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "len", "(", "response", ".", "json", "[", "\"data\"", "]", ")", ",", "5", ",", "\"keine 5 Datensätze\"", ")", "# daten über path und filter müssen gleich sein nur die globale links Angabe unterscheidet sich", "# http://127.0.0.1:5000/api/nutzung?_ispcp={%22_default%22:{%22ersatz_id%22:1754}}&filter=eq(ersatz_id,1754)&page[offset]=0&page[limit]=25", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dbtests/2/dbtestsrel\"", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "reldata", "=", "response", ".", "json", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/dbtestsrel\"", ",", "query_string", "=", "{", "\"filter\"", ":", "\"eq(dbtests_id,2)\"", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "self", ".", "assertEqual", "(", "reldata", "[", "\"data\"", "]", ",", "response", ".", "json", "[", "\"data\"", "]", ",", "\"Rückgaben sind nicht gleich\"", ")" ]
[ 2024, 4 ]
[ 2060, 9 ]
null
python
de
['de', 'de', 'de']
True
true
null
MQTTclass.error
(self, msg)
logging.error wenn logger angegeben dorthin sonst auf die console. Parameters ---------- msg : str Auszugebende Nachricht Returns ------- None.
logging.error wenn logger angegeben dorthin sonst auf die console.
def error(self, msg): """logging.error wenn logger angegeben dorthin sonst auf die console. Parameters ---------- msg : str Auszugebende Nachricht Returns ------- None. """ logger = logging.getLogger( "MQTT" ) if logger.hasHandlers(): logger.error( msg ) else: print("### MQTT ERROR", msg)
[ "def", "error", "(", "self", ",", "msg", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "\"MQTT\"", ")", "if", "logger", ".", "hasHandlers", "(", ")", ":", "logger", ".", "error", "(", "msg", ")", "else", ":", "print", "(", "\"### MQTT ERROR\"", ",", "msg", ")" ]
[ 274, 4 ]
[ 291, 40 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispConfig._configLoad
( self, lastOverlay:int=99999999 )
Konfiguration aus config.json einlesen. Die Datei muss sich ab _basedir im Verzeichniss config befinden Alle config Dateien bis zu der durch _overlayLast gebildeten einlesen Parameters ---------- lastOverlay : int Default is 99999999
Konfiguration aus config.json einlesen.
def _configLoad( self, lastOverlay:int=99999999 ): """Konfiguration aus config.json einlesen. Die Datei muss sich ab _basedir im Verzeichniss config befinden Alle config Dateien bis zu der durch _overlayLast gebildeten einlesen Parameters ---------- lastOverlay : int Default is 99999999 """ def readConfig( filename:str ): if osp.isfile( filename ): # zuerst die normale config Datei einlesen with open( filename, 'r') as f: try: config = json.load( f ) self._config = dict_merge(self._config, DotMap( config ) ) self._configs.append( osp.basename( filename ) ) except: # Fehler auch hier anzeigen, da noch kein logger bereitsteht self._loadErrors.append( filename ) self._configs.append( osp.basename( filename ) + " - ERROR" ) print( "CONFIG: Fehler bei json.load", filename ) pass # den pfad zur konfiguration festlegen configPath = osp.join( self._basedir, "config") # zuerst die normale config Datei einlesen readConfig( osp.join( configPath, "config.json") ) # jetzt alle anderen overlay dateien sortiert einlesen und überlagern configs = glob.glob(osp.join( configPath, 'config-*.json') ) if len(configs) > 0: configs.sort() # alle config Dateien mit Zahlen nach dem - zusammenstellen for name in configs: res = re.search('config-([0-9]*)\.json', name ) # jahr und monat als zahl umwandeln, ein jahr allein wird mit 00 ergänzt ym = 99999999 if res: ym = int( res.group(1) ) if ym <= lastOverlay: readConfig( name )
[ "def", "_configLoad", "(", "self", ",", "lastOverlay", ":", "int", "=", "99999999", ")", ":", "def", "readConfig", "(", "filename", ":", "str", ")", ":", "if", "osp", ".", "isfile", "(", "filename", ")", ":", "# zuerst die normale config Datei einlesen", "with", "open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "try", ":", "config", "=", "json", ".", "load", "(", "f", ")", "self", ".", "_config", "=", "dict_merge", "(", "self", ".", "_config", ",", "DotMap", "(", "config", ")", ")", "self", ".", "_configs", ".", "append", "(", "osp", ".", "basename", "(", "filename", ")", ")", "except", ":", "# Fehler auch hier anzeigen, da noch kein logger bereitsteht", "self", ".", "_loadErrors", ".", "append", "(", "filename", ")", "self", ".", "_configs", ".", "append", "(", "osp", ".", "basename", "(", "filename", ")", "+", "\" - ERROR\"", ")", "print", "(", "\"CONFIG: Fehler bei json.load\"", ",", "filename", ")", "pass", "# den pfad zur konfiguration festlegen", "configPath", "=", "osp", ".", "join", "(", "self", ".", "_basedir", ",", "\"config\"", ")", "# zuerst die normale config Datei einlesen", "readConfig", "(", "osp", ".", "join", "(", "configPath", ",", "\"config.json\"", ")", ")", "# jetzt alle anderen overlay dateien sortiert einlesen und überlagern", "configs", "=", "glob", ".", "glob", "(", "osp", ".", "join", "(", "configPath", ",", "'config-*.json'", ")", ")", "if", "len", "(", "configs", ")", ">", "0", ":", "configs", ".", "sort", "(", ")", "# alle config Dateien mit Zahlen nach dem - zusammenstellen", "for", "name", "in", "configs", ":", "res", "=", "re", ".", "search", "(", "'config-([0-9]*)\\.json'", ",", "name", ")", "# jahr und monat als zahl umwandeln, ein jahr allein wird mit 00 ergänzt", "ym", "=", "99999999", "if", "res", ":", "ym", "=", "int", "(", "res", ".", "group", "(", "1", ")", ")", "if", "ym", "<=", "lastOverlay", ":", "readConfig", "(", "name", ")" ]
[ 268, 4 ]
[ 314, 42 ]
null
python
de
['de', 'de', 'de']
True
true
null
get_weekspan
(date)
return startdate, enddate
Gibt den ersten und den letzten Tag der Woche, in der `date` liegt, zurück. Dabei ist Montag der erste Tag der woche und Sonntag der letzte. >>> get_weekspan(datetime.date(2011, 3, 23)) (datetime.date(2011, 3, 21), datetime.date(2011, 3, 27))
Gibt den ersten und den letzten Tag der Woche, in der `date` liegt, zurück.
def get_weekspan(date): """Gibt den ersten und den letzten Tag der Woche, in der `date` liegt, zurück. Dabei ist Montag der erste Tag der woche und Sonntag der letzte. >>> get_weekspan(datetime.date(2011, 3, 23)) (datetime.date(2011, 3, 21), datetime.date(2011, 3, 27)) """ startdate = date_trunc('week', date) enddate = startdate + datetime.timedelta(days=6) return startdate, enddate
[ "def", "get_weekspan", "(", "date", ")", ":", "startdate", "=", "date_trunc", "(", "'week'", ",", "date", ")", "enddate", "=", "startdate", "+", "datetime", ".", "timedelta", "(", "days", "=", "6", ")", "return", "startdate", ",", "enddate" ]
[ 198, 0 ]
[ 208, 29 ]
null
python
de
['de', 'de', 'de']
True
true
null
copy
(values: dict, data: StepData)
Kopiert einen Wert zu einem neuen Key. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Kopiert einen Wert zu einem neuen Key.
def copy(values: dict, data: StepData): """Kopiert einen Wert zu einem neuen Key. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ for idx, key in data.loop_key(values["keys"], values): new_key = get_new_keys(values, idx) new_value = deepcopy(data.get_data(key, values)) data.insert_data(new_key, new_value, values)
[ "def", "copy", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "new_value", "=", "deepcopy", "(", "data", ".", "get_data", "(", "key", ",", "values", ")", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")" ]
[ 428, 0 ]
[ 437, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
qa_field.__init__
( self, checkField, baseField=None, normalize: str="none" )
checkField und ggf baseField laden und ispCheckClass initialisieren
checkField und ggf baseField laden und ispCheckClass initialisieren
def __init__( self, checkField, baseField=None, normalize: str="none" ): """ checkField und ggf baseField laden und ispCheckClass initialisieren """ self.checkField = checkField self.baseField = baseField if self.checkField and self.baseField: # checkField und baseField wurden angegeben, normalize möglich # self.image und self.baseImage initialisieren und ggf normalisieren ispCheckClass.__init__( self, image=FSImage( self.checkField ), baseImage=FSImage( self.baseField ), normalize=normalize ) elif self.checkField: # nur checkfield wurde angegeben # self.image initialisieren ispCheckClass.__init__( self, image=FSImage( self.checkField ) )
[ "def", "__init__", "(", "self", ",", "checkField", ",", "baseField", "=", "None", ",", "normalize", ":", "str", "=", "\"none\"", ")", ":", "self", ".", "checkField", "=", "checkField", "self", ".", "baseField", "=", "baseField", "if", "self", ".", "checkField", "and", "self", ".", "baseField", ":", "# checkField und baseField wurden angegeben, normalize möglich", "# self.image und self.baseImage initialisieren und ggf normalisieren", "ispCheckClass", ".", "__init__", "(", "self", ",", "image", "=", "FSImage", "(", "self", ".", "checkField", ")", ",", "baseImage", "=", "FSImage", "(", "self", ".", "baseField", ")", ",", "normalize", "=", "normalize", ")", "elif", "self", ".", "checkField", ":", "# nur checkfield wurde angegeben", "# self.image initialisieren", "ispCheckClass", ".", "__init__", "(", "self", ",", "image", "=", "FSImage", "(", "self", ".", "checkField", ")", ")" ]
[ 66, 4 ]
[ 87, 13 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispDicom.SERIES
( self, query:dict={} )
return self.query( ds )
Führt eine suche nach SERIES durch. Wie query mit einem default Dataset Parameters ---------- query : dict, optional query parameter für ds. The default is {}. Returns ------- results : list gefundene daten status : hex Rückgabecode von send_c_find::
Führt eine suche nach SERIES durch.
def SERIES( self, query:dict={} ): """Führt eine suche nach SERIES durch. Wie query mit einem default Dataset Parameters ---------- query : dict, optional query parameter für ds. The default is {}. Returns ------- results : list gefundene daten status : hex Rückgabecode von send_c_find:: """ ds_model = dicomQueryDefaults["PATIENT"].copy() ds_model.update( dicomQueryDefaults["STUDY"] ) ds_model.update( dicomQueryDefaults["SERIES"] ) ds_model.update( query ) ds = Dataset() for name, value in ds_model.items(): ds.__setattr__(name, value) # Abfrage durchführen return self.query( ds )
[ "def", "SERIES", "(", "self", ",", "query", ":", "dict", "=", "{", "}", ")", ":", "ds_model", "=", "dicomQueryDefaults", "[", "\"PATIENT\"", "]", ".", "copy", "(", ")", "ds_model", ".", "update", "(", "dicomQueryDefaults", "[", "\"STUDY\"", "]", ")", "ds_model", ".", "update", "(", "dicomQueryDefaults", "[", "\"SERIES\"", "]", ")", "ds_model", ".", "update", "(", "query", ")", "ds", "=", "Dataset", "(", ")", "for", "name", ",", "value", "in", "ds_model", ".", "items", "(", ")", ":", "ds", ".", "__setattr__", "(", "name", ",", "value", ")", "# Abfrage durchführen", "return", "self", ".", "query", "(", "ds", ")" ]
[ 759, 4 ]
[ 787, 31 ]
null
python
de
['de', 'de', 'de']
True
true
null
lese_abschnitt_basiszinsen
(abschnitt)
return result
Hole die Basiszinsen aus dem Abschnitt der INI-Datei :param abschnitt: eingelesener Basiszins-Abschnitt aus der INI-Datei :type abschnitt: Section :return: dict mit key=Jahr und value=Basiszinsprozentsatz :rtype: dict
Hole die Basiszinsen aus dem Abschnitt der INI-Datei
def lese_abschnitt_basiszinsen(abschnitt): """ Hole die Basiszinsen aus dem Abschnitt der INI-Datei :param abschnitt: eingelesener Basiszins-Abschnitt aus der INI-Datei :type abschnitt: Section :return: dict mit key=Jahr und value=Basiszinsprozentsatz :rtype: dict """ result = {} for item in abschnitt: try: jahr = int(item) except ValueError: print("Fehler: Jahr im Abschnitt %s keine Zahl: %s" ( BASISZINS_SECTION, item)) sys.exit(2) # Beispiel: 0,87% # raw: keine Interpolation des "%" prozwert = abschnitt.get(item, raw=True) try: result[jahr] = prozent_conv(prozwert) except: print("* Fehler beim Einlesen der Basiszinsen für Jahr %s: %s" % (jahr, prozwert)) sys.exit(3) return result
[ "def", "lese_abschnitt_basiszinsen", "(", "abschnitt", ")", ":", "result", "=", "{", "}", "for", "item", "in", "abschnitt", ":", "try", ":", "jahr", "=", "int", "(", "item", ")", "except", "ValueError", ":", "print", "(", "\"Fehler: Jahr im Abschnitt %s keine Zahl: %s\"", "(", "BASISZINS_SECTION", ",", "item", ")", ")", "sys", ".", "exit", "(", "2", ")", "# Beispiel: 0,87%", "# raw: keine Interpolation des \"%\"", "prozwert", "=", "abschnitt", ".", "get", "(", "item", ",", "raw", "=", "True", ")", "try", ":", "result", "[", "jahr", "]", "=", "prozent_conv", "(", "prozwert", ")", "except", ":", "print", "(", "\"* Fehler beim Einlesen der Basiszinsen für Jahr %s: %s\" ", "", "(", "jahr", ",", "prozwert", ")", ")", "sys", ".", "exit", "(", "3", ")", "return", "result" ]
[ 196, 0 ]
[ 224, 17 ]
null
python
de
['de', 'de', 'de']
True
true
null
testBase.setUp
(self)
wird vor jedem test aufgerufen
wird vor jedem test aufgerufen
def setUp(self): ''' wird vor jedem test aufgerufen ''' pass
[ "def", "setUp", "(", "self", ")", ":", "pass" ]
[ 928, 4 ]
[ 931, 12 ]
null
python
de
['de', 'de', 'de']
True
true
null
checkMlc.doMT_8_02_1_2
(self, fileData )
return self._doLamellenpositioniergenauigkeit(fileData, md)
Lamellenpositioniergenauigkeit FWHM sucht den Peak für alle Leaves, Position des Peaks entspricht Position der Leaves, Breite des Peaks gibt Abstand der gegenüberliegenden Leaf-Paare. Parameters ---------- fileData : pandas.dataframe Returns ------- pdfFilename : str Name der erzeugten Pdfdatei result : list list mit dicts der Testergebnisse See Also -------- isp.results : Aufbau von result
Lamellenpositioniergenauigkeit FWHM sucht den Peak für alle Leaves, Position des Peaks entspricht Position der Leaves,
def doMT_8_02_1_2(self, fileData ): """Lamellenpositioniergenauigkeit FWHM sucht den Peak für alle Leaves, Position des Peaks entspricht Position der Leaves, Breite des Peaks gibt Abstand der gegenüberliegenden Leaf-Paare. Parameters ---------- fileData : pandas.dataframe Returns ------- pdfFilename : str Name der erzeugten Pdfdatei result : list list mit dicts der Testergebnisse See Also -------- isp.results : Aufbau von result """ # metadata defaults vorbereiten md = dict_merge( DotMap( { "series_sort_values" : ["gantry", "collimator"], "series_groupby" : ['day', 'SeriesNumber'], "manual": { "filename": self.metadata.info["anleitung"], "attrs": {"class":"layout-fill-width"}, }, "_leafPlot" : { "width" : 45, "height" : 45}, "_boxPlot" : { "width" : 90, "height" : 45}, "plotTitle" : "lfd:{lfd:d} G:{gantry:01.1f} K:{collimator:01.1f}", "table_fields" : [ {'field': 'lfd', 'label':'lfd', 'format':'{0:d}' }, {'field': 'gantry', 'label':'Gantry', 'format':'{0:1.1f}' }, {'field': 'collimator', 'label': 'Kollimator', 'format':'{0:1.1f}' }, {'field': 'fwxm.min', 'label':'FWXM<br>min', 'format':'{0:.3f}' }, {'field': 'fwxm.mean', 'label':'FWXM<br>mean', 'format':'{0:.3f}' }, {'field': 'fwxm.max', 'label':'FWXM<br>max', 'format':'{0:.3f}' }, {'field': 'fwxm.passed', 'label':'FWXM<br>passed' }, {'field': 'shift.min', 'label':'Shift<br>min', 'format':'{0:.3f}' }, {'field': 'shift.mean', 'label':'Shift<br>mean', 'format':'{0:.3f}' }, {'field': 'shift.max', 'label':'Shift<br>max', 'format':'{0:.3f}' }, {'field': 'shift.passed', 'label':'Shift<br>passed' }, ], "options":{ "leafs" : { "from": 1, "to" : 60 } } }), self.metadata ) return self._doLamellenpositioniergenauigkeit(fileData, md)
[ "def", "doMT_8_02_1_2", "(", "self", ",", "fileData", ")", ":", "# metadata defaults vorbereiten", "md", "=", "dict_merge", "(", "DotMap", "(", "{", "\"series_sort_values\"", ":", "[", "\"gantry\"", ",", "\"collimator\"", "]", ",", "\"series_groupby\"", ":", "[", "'day'", ",", "'SeriesNumber'", "]", ",", "\"manual\"", ":", "{", "\"filename\"", ":", "self", ".", "metadata", ".", "info", "[", "\"anleitung\"", "]", ",", "\"attrs\"", ":", "{", "\"class\"", ":", "\"layout-fill-width\"", "}", ",", "}", ",", "\"_leafPlot\"", ":", "{", "\"width\"", ":", "45", ",", "\"height\"", ":", "45", "}", ",", "\"_boxPlot\"", ":", "{", "\"width\"", ":", "90", ",", "\"height\"", ":", "45", "}", ",", "\"plotTitle\"", ":", "\"lfd:{lfd:d} G:{gantry:01.1f} K:{collimator:01.1f}\"", ",", "\"table_fields\"", ":", "[", "{", "'field'", ":", "'lfd'", ",", "'label'", ":", "'lfd'", ",", "'format'", ":", "'{0:d}'", "}", ",", "{", "'field'", ":", "'gantry'", ",", "'label'", ":", "'Gantry'", ",", "'format'", ":", "'{0:1.1f}'", "}", ",", "{", "'field'", ":", "'collimator'", ",", "'label'", ":", "'Kollimator'", ",", "'format'", ":", "'{0:1.1f}'", "}", ",", "{", "'field'", ":", "'fwxm.min'", ",", "'label'", ":", "'FWXM<br>min'", ",", "'format'", ":", "'{0:.3f}'", "}", ",", "{", "'field'", ":", "'fwxm.mean'", ",", "'label'", ":", "'FWXM<br>mean'", ",", "'format'", ":", "'{0:.3f}'", "}", ",", "{", "'field'", ":", "'fwxm.max'", ",", "'label'", ":", "'FWXM<br>max'", ",", "'format'", ":", "'{0:.3f}'", "}", ",", "{", "'field'", ":", "'fwxm.passed'", ",", "'label'", ":", "'FWXM<br>passed'", "}", ",", "{", "'field'", ":", "'shift.min'", ",", "'label'", ":", "'Shift<br>min'", ",", "'format'", ":", "'{0:.3f}'", "}", ",", "{", "'field'", ":", "'shift.mean'", ",", "'label'", ":", "'Shift<br>mean'", ",", "'format'", ":", "'{0:.3f}'", "}", ",", "{", "'field'", ":", "'shift.max'", ",", "'label'", ":", "'Shift<br>max'", ",", "'format'", ":", "'{0:.3f}'", "}", ",", "{", "'field'", ":", "'shift.passed'", ",", 
"'label'", ":", "'Shift<br>passed'", "}", ",", "]", ",", "\"options\"", ":", "{", "\"leafs\"", ":", "{", "\"from\"", ":", "1", ",", "\"to\"", ":", "60", "}", "}", "}", ")", ",", "self", ".", "metadata", ")", "return", "self", ".", "_doLamellenpositioniergenauigkeit", "(", "fileData", ",", "md", ")" ]
[ 2156, 4 ]
[ 2212, 67 ]
null
python
de
['de', 'de', 'de']
False
true
null
ispDicom.initAE
( self )
return status
Application Entity bereitstellen Status Codes: Non-service specific - 0xC000 to 0xC0FF Verwendete evt_handlers:: * EVT_ESTABLISHED * EVT_REJECTED * EVT_RELEASED Returns ------- status : hex - 0x0000 - alles OK - 0xC0FF -Verbindung fehlgeschlagen
Application Entity bereitstellen
def initAE( self ): """Application Entity bereitstellen Status Codes: Non-service specific - 0xC000 to 0xC0FF Verwendete evt_handlers:: * EVT_ESTABLISHED * EVT_REJECTED * EVT_RELEASED Returns ------- status : hex - 0x0000 - alles OK - 0xC0FF -Verbindung fehlgeschlagen """ # sicherheitshalber bestehende schließen self.closeAE() # Initialise the Application Entity assoc = None try: # Initialise the Application Entity aet = self.config.dicom[self.server]["aet"] self.ae = AE( ae_title=aet ) # Patient Suche verwenden self.ae.requested_contexts = QueryRetrievePresentationContexts # patient level self.ae.add_requested_context( PatientRootQueryRetrieveInformationModelFind ) self.ae.add_requested_context( PatientRootQueryRetrieveInformationModelMove ) self.ae.add_requested_context( PatientRootQueryRetrieveInformationModelGet ) # Study level self.ae.add_requested_context( StudyRootQueryRetrieveInformationModelFind ) self.ae.add_requested_context( StudyRootQueryRetrieveInformationModelMove ) self.ae.add_requested_context( StudyRootQueryRetrieveInformationModelGet ) # patientStudyOnly self.ae.add_requested_context( PatientStudyOnlyQueryRetrieveInformationModelFind ) self.ae.add_requested_context( PatientStudyOnlyQueryRetrieveInformationModelMove ) self.ae.add_requested_context( PatientStudyOnlyQueryRetrieveInformationModelGet ) # Add the requested presentation context (Storage SCP) self.ae.add_requested_context( CTImageStorage ) self.ae.add_requested_context( XRayRadiationDoseSRStorage ) # use all Storages and Transfers storage_sop_classes = [ cx.abstract_syntax for cx in AllStoragePresentationContexts ] for uid in storage_sop_classes: self.ae.add_supported_context(uid, ALL_TRANSFER_SYNTAXES) # bei den handlern wird nicht auf EVT_REJECTED geprüft, da offline ja möglich ist handlers=[ ( evt.EVT_ESTABLISHED , self.handle_EVENT), #( evt.EVT_REJECTED , self.handle_event), ( evt.EVT_RELEASED, self.handle_EVENT), # für send_c_get ( evt.EVT_C_STORE, self.handle_STORE), ] # 
requestmode für den server festlegen: c_move oder c_get self.request_mode = self.config.get( ["dicom", self.server, "request_mode"], "c_move" ) # request_query_model für den server festlegen: P-patient S-series O-PS only self.request_query_model = self.config.get( ["dicom", self.server, "request_query_model"], "S" ) # Create an SCP/SCU Role Selection Negotiation item for CT Image Storage roles = [] roles.append( build_role(CTImageStorage, scp_role=True, scu_role=True ) ) roles.append( build_role(XRayRadiationDoseSRStorage, scp_role=True, scu_role=True) ) # Associate with peer AE assoc = self.ae.associate( self.config.dicom[self.server]['server_ip'], self.config.dicom[self.server]['server_port'], ae_title=self.config.dicom[self.server]['aec'], evt_handlers=handlers, ext_neg=roles ) except: # pragma: no cover pass self.assoc = None status = 0xC0FF if assoc and assoc.is_established: self.assoc = assoc status = 0x0000 logger.debug('dicomClass.initAE: Verbindung hergestellt') else: # pragma: no cover logger.warning('dicomClass.initAE: Association rejected, aborted or never connected') return status
[ "def", "initAE", "(", "self", ")", ":", "# sicherheitshalber bestehende schließen", "self", ".", "closeAE", "(", ")", "# Initialise the Application Entity", "assoc", "=", "None", "try", ":", "# Initialise the Application Entity", "aet", "=", "self", ".", "config", ".", "dicom", "[", "self", ".", "server", "]", "[", "\"aet\"", "]", "self", ".", "ae", "=", "AE", "(", "ae_title", "=", "aet", ")", "# Patient Suche verwenden", "self", ".", "ae", ".", "requested_contexts", "=", "QueryRetrievePresentationContexts", "# patient level", "self", ".", "ae", ".", "add_requested_context", "(", "PatientRootQueryRetrieveInformationModelFind", ")", "self", ".", "ae", ".", "add_requested_context", "(", "PatientRootQueryRetrieveInformationModelMove", ")", "self", ".", "ae", ".", "add_requested_context", "(", "PatientRootQueryRetrieveInformationModelGet", ")", "# Study level", "self", ".", "ae", ".", "add_requested_context", "(", "StudyRootQueryRetrieveInformationModelFind", ")", "self", ".", "ae", ".", "add_requested_context", "(", "StudyRootQueryRetrieveInformationModelMove", ")", "self", ".", "ae", ".", "add_requested_context", "(", "StudyRootQueryRetrieveInformationModelGet", ")", "# patientStudyOnly", "self", ".", "ae", ".", "add_requested_context", "(", "PatientStudyOnlyQueryRetrieveInformationModelFind", ")", "self", ".", "ae", ".", "add_requested_context", "(", "PatientStudyOnlyQueryRetrieveInformationModelMove", ")", "self", ".", "ae", ".", "add_requested_context", "(", "PatientStudyOnlyQueryRetrieveInformationModelGet", ")", "# Add the requested presentation context (Storage SCP)", "self", ".", "ae", ".", "add_requested_context", "(", "CTImageStorage", ")", "self", ".", "ae", ".", "add_requested_context", "(", "XRayRadiationDoseSRStorage", ")", "# use all Storages and Transfers", "storage_sop_classes", "=", "[", "cx", ".", "abstract_syntax", "for", "cx", "in", "AllStoragePresentationContexts", "]", "for", "uid", "in", "storage_sop_classes", ":", "self", ".", "ae", ".", 
"add_supported_context", "(", "uid", ",", "ALL_TRANSFER_SYNTAXES", ")", "# bei den handlern wird nicht auf EVT_REJECTED geprüft, da offline ja möglich ist", "handlers", "=", "[", "(", "evt", ".", "EVT_ESTABLISHED", ",", "self", ".", "handle_EVENT", ")", ",", "#( evt.EVT_REJECTED , self.handle_event),", "(", "evt", ".", "EVT_RELEASED", ",", "self", ".", "handle_EVENT", ")", ",", "# für send_c_get", "(", "evt", ".", "EVT_C_STORE", ",", "self", ".", "handle_STORE", ")", ",", "]", "# requestmode für den server festlegen: c_move oder c_get", "self", ".", "request_mode", "=", "self", ".", "config", ".", "get", "(", "[", "\"dicom\"", ",", "self", ".", "server", ",", "\"request_mode\"", "]", ",", "\"c_move\"", ")", "# request_query_model für den server festlegen: P-patient S-series O-PS only", "self", ".", "request_query_model", "=", "self", ".", "config", ".", "get", "(", "[", "\"dicom\"", ",", "self", ".", "server", ",", "\"request_query_model\"", "]", ",", "\"S\"", ")", "# Create an SCP/SCU Role Selection Negotiation item for CT Image Storage", "roles", "=", "[", "]", "roles", ".", "append", "(", "build_role", "(", "CTImageStorage", ",", "scp_role", "=", "True", ",", "scu_role", "=", "True", ")", ")", "roles", ".", "append", "(", "build_role", "(", "XRayRadiationDoseSRStorage", ",", "scp_role", "=", "True", ",", "scu_role", "=", "True", ")", ")", "# Associate with peer AE", "assoc", "=", "self", ".", "ae", ".", "associate", "(", "self", ".", "config", ".", "dicom", "[", "self", ".", "server", "]", "[", "'server_ip'", "]", ",", "self", ".", "config", ".", "dicom", "[", "self", ".", "server", "]", "[", "'server_port'", "]", ",", "ae_title", "=", "self", ".", "config", ".", "dicom", "[", "self", ".", "server", "]", "[", "'aec'", "]", ",", "evt_handlers", "=", "handlers", ",", "ext_neg", "=", "roles", ")", "except", ":", "# pragma: no cover", "pass", "self", ".", "assoc", "=", "None", "status", "=", "0xC0FF", "if", "assoc", "and", "assoc", ".", "is_established", ":", 
"self", ".", "assoc", "=", "assoc", "status", "=", "0x0000", "logger", ".", "debug", "(", "'dicomClass.initAE: Verbindung hergestellt'", ")", "else", ":", "# pragma: no cover", "logger", ".", "warning", "(", "'dicomClass.initAE: Association rejected, aborted or never connected'", ")", "return", "status" ]
[ 231, 4 ]
[ 332, 21 ]
null
python
de
['de', 'de', 'de']
True
true
null
berechne_vorabpauschale_und_bemessungsgrundlage
(para, basiszinsen_feld)
return result
Berechnung der Vorabpauschale und der Bemessungsgrundlage :param para: Eingangsvariablen :type para: dict :return: Ausgangsvariablen :rtype: dict oder None, falls keine Berechnung stattgefunden hat :note: Vorabpauschale VOR Anwendung des Freistellungssatzes :note: Bemessungsgrundlage VOR Anwendung des Steuersatzes
Berechnung der Vorabpauschale und der Bemessungsgrundlage
def berechne_vorabpauschale_und_bemessungsgrundlage(para, basiszinsen_feld): """ Berechnung der Vorabpauschale und der Bemessungsgrundlage :param para: Eingangsvariablen :type para: dict :return: Ausgangsvariablen :rtype: dict oder None, falls keine Berechnung stattgefunden hat :note: Vorabpauschale VOR Anwendung des Freistellungssatzes :note: Bemessungsgrundlage VOR Anwendung des Steuersatzes """ ###### Ist überhaupt etwas zu berechnen? # Abrechnungsjahr vor dem Kauf des Papiers if para[ABRECHNUNGSJAHR] < para[KAUFDATUM].year: return None # Abrechnungsjahr nach dem Verkauf des Papiers if para.get(VERKAUFSDATUM) is not None and para[ABRECHNUNGSJAHR] > para[VERKAUFSDATUM].year: return None result = { VORABPAUSCHALE: 0.0, BEMESSUNGSGRUNDLAGE: 0.0 } # Handelt es sich um einen Verkauf? if para.get(VERKAUFSDATUM) is not None and para[ABRECHNUNGSJAHR] == para[VERKAUFSDATUM].year: # Keine Vorabpauschale im Verkaufsjahr result[VORABPAUSCHALE] = 0.0 result[WERTSTEIGERUNG_GESAMT] = ( para[VERKAUFSKURS] - para[KAUFKURS]) * para[ANZAHL] result[BEMESSUNGSGRUNDLAGE] = \ (result[WERTSTEIGERUNG_GESAMT] + para[AUSSCHUETTUNGEN_IM_JAHR]) \ * (100.0 - para[FREISTELLUNGSSATZ]) / 100.0 \ - para[SUMME_ALTE_VORABPAUSCHALEN] return result # Papier wurde über das Jahresende gehalten result[UNTERJAHR_FAKTOR] = 1.0 if para[ABRECHNUNGSJAHR] == para[KAUFDATUM].year: result[UNTERJAHR_FAKTOR] = (13 - para[KAUFDATUM].month) / 12.0 result[BASISERTRAG] = 0.7 * basiszinsen_feld[para[ABRECHNUNGSJAHR] ] / 100.0 * para[KURS_JAHRESANFANG] * para[ANZAHL] if para.get(KURS_JAHRESENDE) in [None, ""]: # Schätzung auf der Grundlage des Basisertrags und der Ausschüttungen result[FEHLERHINWEIS] = "Schätzung, da kein Jahresendkurs vorhanden" vorabpauschale_1 = result[BASISERTRAG] - para[AUSSCHUETTUNGEN_IM_JAHR] if vorabpauschale_1 < 0: vorabpauschale_1 = 0 else: # Normale Berechnung result[WERTSTEIGERUNG_JAHR] = ( para[KURS_JAHRESENDE] - para[KURS_JAHRESANFANG]) * para[ANZAHL] # Keine Vorabpauschale, falls # 
keine Wertsteigerung # ODER # Ausschüttungen im Jahr > als Basisertrag if (result[WERTSTEIGERUNG_JAHR] <= 0) or (para[AUSSCHUETTUNGEN_IM_JAHR] >= result[BASISERTRAG]): vorabpauschale_1 = 0.0 else: if result[WERTSTEIGERUNG_JAHR] + para[AUSSCHUETTUNGEN_IM_JAHR] >= result[BASISERTRAG]: vorabpauschale_1 = result[BASISERTRAG] - \ para[AUSSCHUETTUNGEN_IM_JAHR] else: vorabpauschale_1 = result[WERTSTEIGERUNG_JAHR] # weiter verringert: # * ggf. anteilig fürs Jahr # * abzüglich Freistellungssatz result[VORABPAUSCHALE] = vorabpauschale_1 \ * result[UNTERJAHR_FAKTOR] # Bemessungsgrundlage = # verbleibende Vorabpauschale # + Ausschüttungen im Jahr (anzügliche Freistellung) result[BEMESSUNGSGRUNDLAGE] = (result[VORABPAUSCHALE] + para[AUSSCHUETTUNGEN_IM_JAHR]) \ * (100.0 - para[FREISTELLUNGSSATZ]) / 100.0 return result
[ "def", "berechne_vorabpauschale_und_bemessungsgrundlage", "(", "para", ",", "basiszinsen_feld", ")", ":", "###### Ist überhaupt etwas zu berechnen?", "# Abrechnungsjahr vor dem Kauf des Papiers", "if", "para", "[", "ABRECHNUNGSJAHR", "]", "<", "para", "[", "KAUFDATUM", "]", ".", "year", ":", "return", "None", "# Abrechnungsjahr nach dem Verkauf des Papiers", "if", "para", ".", "get", "(", "VERKAUFSDATUM", ")", "is", "not", "None", "and", "para", "[", "ABRECHNUNGSJAHR", "]", ">", "para", "[", "VERKAUFSDATUM", "]", ".", "year", ":", "return", "None", "result", "=", "{", "VORABPAUSCHALE", ":", "0.0", ",", "BEMESSUNGSGRUNDLAGE", ":", "0.0", "}", "# Handelt es sich um einen Verkauf?", "if", "para", ".", "get", "(", "VERKAUFSDATUM", ")", "is", "not", "None", "and", "para", "[", "ABRECHNUNGSJAHR", "]", "==", "para", "[", "VERKAUFSDATUM", "]", ".", "year", ":", "# Keine Vorabpauschale im Verkaufsjahr", "result", "[", "VORABPAUSCHALE", "]", "=", "0.0", "result", "[", "WERTSTEIGERUNG_GESAMT", "]", "=", "(", "para", "[", "VERKAUFSKURS", "]", "-", "para", "[", "KAUFKURS", "]", ")", "*", "para", "[", "ANZAHL", "]", "result", "[", "BEMESSUNGSGRUNDLAGE", "]", "=", "(", "result", "[", "WERTSTEIGERUNG_GESAMT", "]", "+", "para", "[", "AUSSCHUETTUNGEN_IM_JAHR", "]", ")", "*", "(", "100.0", "-", "para", "[", "FREISTELLUNGSSATZ", "]", ")", "/", "100.0", "-", "para", "[", "SUMME_ALTE_VORABPAUSCHALEN", "]", "return", "result", "# Papier wurde über das Jahresende gehalten", "result", "[", "UNTERJAHR_FAKTOR", "]", "=", "1.0", "if", "para", "[", "ABRECHNUNGSJAHR", "]", "==", "para", "[", "KAUFDATUM", "]", ".", "year", ":", "result", "[", "UNTERJAHR_FAKTOR", "]", "=", "(", "13", "-", "para", "[", "KAUFDATUM", "]", ".", "month", ")", "/", "12.0", "result", "[", "BASISERTRAG", "]", "=", "0.7", "*", "basiszinsen_feld", "[", "para", "[", "ABRECHNUNGSJAHR", "]", "]", "/", "100.0", "*", "para", "[", "KURS_JAHRESANFANG", "]", "*", "para", "[", "ANZAHL", "]", "if", "para", ".", "get", "(", 
"KURS_JAHRESENDE", ")", "in", "[", "None", ",", "\"\"", "]", ":", "# Schätzung auf der Grundlage des Basisertrags und der Ausschüttungen", "result", "[", "FEHLERHINWEIS", "]", "=", "\"Schätzung, da kein Jahresendkurs vorhanden\"", "vorabpauschale_1", "=", "result", "[", "BASISERTRAG", "]", "-", "para", "[", "AUSSCHUETTUNGEN_IM_JAHR", "]", "if", "vorabpauschale_1", "<", "0", ":", "vorabpauschale_1", "=", "0", "else", ":", "# Normale Berechnung", "result", "[", "WERTSTEIGERUNG_JAHR", "]", "=", "(", "para", "[", "KURS_JAHRESENDE", "]", "-", "para", "[", "KURS_JAHRESANFANG", "]", ")", "*", "para", "[", "ANZAHL", "]", "# Keine Vorabpauschale, falls", "# keine Wertsteigerung", "# ODER", "# Ausschüttungen im Jahr > als Basisertrag", "if", "(", "result", "[", "WERTSTEIGERUNG_JAHR", "]", "<=", "0", ")", "or", "(", "para", "[", "AUSSCHUETTUNGEN_IM_JAHR", "]", ">=", "result", "[", "BASISERTRAG", "]", ")", ":", "vorabpauschale_1", "=", "0.0", "else", ":", "if", "result", "[", "WERTSTEIGERUNG_JAHR", "]", "+", "para", "[", "AUSSCHUETTUNGEN_IM_JAHR", "]", ">=", "result", "[", "BASISERTRAG", "]", ":", "vorabpauschale_1", "=", "result", "[", "BASISERTRAG", "]", "-", "para", "[", "AUSSCHUETTUNGEN_IM_JAHR", "]", "else", ":", "vorabpauschale_1", "=", "result", "[", "WERTSTEIGERUNG_JAHR", "]", "# weiter verringert:", "# * ggf. anteilig fürs Jahr", "# * abzüglich Freistellungssatz", "result", "[", "VORABPAUSCHALE", "]", "=", "vorabpauschale_1", "*", "result", "[", "UNTERJAHR_FAKTOR", "]", "# Bemessungsgrundlage =", "# verbleibende Vorabpauschale", "# + Ausschüttungen im Jahr (anzügliche Freistellung)", "result", "[", "BEMESSUNGSGRUNDLAGE", "]", "=", "(", "result", "[", "VORABPAUSCHALE", "]", "+", "para", "[", "AUSSCHUETTUNGEN_IM_JAHR", "]", ")", "*", "(", "100.0", "-", "para", "[", "FREISTELLUNGSSATZ", "]", ")", "/", "100.0", "return", "result" ]
[ 27, 0 ]
[ 116, 17 ]
null
python
de
['de', 'de', 'de']
True
true
null
get_yearspan
(date)
return startdate, enddate
Gibt den ersten und letzten Tag des Jahres zurück in dem `date` liegt >>> get_yearspan(datetime.date(1980, 5, 4)) (datetime.date(1980, 1, 1), datetime.date(1980, 12, 31)) >>> get_yearspan(datetime.date(1986, 3, 11)) (datetime.date(1986, 1, 1), datetime.date(1986, 12, 31))
Gibt den ersten und letzten Tag des Jahres zurück in dem `date` liegt
def get_yearspan(date): """Gibt den ersten und letzten Tag des Jahres zurück in dem `date` liegt >>> get_yearspan(datetime.date(1980, 5, 4)) (datetime.date(1980, 1, 1), datetime.date(1980, 12, 31)) >>> get_yearspan(datetime.date(1986, 3, 11)) (datetime.date(1986, 1, 1), datetime.date(1986, 12, 31)) """ startdate = date_trunc('year', date) enddate = type(startdate)(startdate.year, 12, 31) return startdate, enddate
[ "def", "get_yearspan", "(", "date", ")", ":", "startdate", "=", "date_trunc", "(", "'year'", ",", "date", ")", "enddate", "=", "type", "(", "startdate", ")", "(", "startdate", ".", "year", ",", "12", ",", "31", ")", "return", "startdate", ",", "enddate" ]
[ 149, 0 ]
[ 159, 29 ]
null
python
de
['de', 'de', 'de']
True
true
null
date_now
(values: dict, data: StepData)
Generiert das heutige Datum und gibt es im gewünschten Format aus. Generiert das heutige Datum und gibt es im unter `"format"` angegebenen Format aus. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Generiert das heutige Datum und gibt es im gewünschten Format aus.
def date_now(values: dict, data: StepData): """Generiert das heutige Datum und gibt es im gewünschten Format aus. Generiert das heutige Datum und gibt es im unter `"format"` angegebenen Format aus. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ new_key = values["new_key"] value = datetime.now() zeropaded_off = data.get_data(values.get("zeropaded_off", False), values, bool) if zeropaded_off: new_value = value.strftime(data.format(values["format"], values)).lstrip("0").replace(" 0", " ") else: new_value = value.strftime(data.format(values["format"], values)) data.insert_data(new_key, new_value, values)
[ "def", "date_now", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "new_key", "=", "values", "[", "\"new_key\"", "]", "value", "=", "datetime", ".", "now", "(", ")", "zeropaded_off", "=", "data", ".", "get_data", "(", "values", ".", "get", "(", "\"zeropaded_off\"", ",", "False", ")", ",", "values", ",", "bool", ")", "if", "zeropaded_off", ":", "new_value", "=", "value", ".", "strftime", "(", "data", ".", "format", "(", "values", "[", "\"format\"", "]", ",", "values", ")", ")", ".", "lstrip", "(", "\"0\"", ")", ".", "replace", "(", "\" 0\"", ",", "\" \"", ")", "else", ":", "new_value", "=", "value", ".", "strftime", "(", "data", ".", "format", "(", "values", "[", "\"format\"", "]", ",", "values", ")", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")" ]
[ 352, 0 ]
[ 367, 48 ]
null
python
de
['de', 'de', 'de']
True
true
null
option
(values, data)
return audio_parts(execute_type_option(values, data), data)
Führt die aufgeführten `"audio_parts"`-Funktionen aus, je nachdem ob ein bestimmter Wert `"true"` oder `"false"` ist. Wenn der Wert, der in `"check"` steht `"true"` ist, werden die `"audio_parts"`-Funktionen ausgeführt, die unter `"on_true"` stehen. Wenn der Wert, der in `"check"` steht `"false"` ist, werden die `"audio_parts"`-Funktionen ausgeführt, die unter `"on_false"` stehen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Führt die aufgeführten `"audio_parts"`-Funktionen aus, je nachdem ob ein bestimmter Wert `"true"` oder `"false"` ist.
def option(values, data): """Führt die aufgeführten `"audio_parts"`-Funktionen aus, je nachdem ob ein bestimmter Wert `"true"` oder `"false"` ist. Wenn der Wert, der in `"check"` steht `"true"` ist, werden die `"audio_parts"`-Funktionen ausgeführt, die unter `"on_true"` stehen. Wenn der Wert, der in `"check"` steht `"false"` ist, werden die `"audio_parts"`-Funktionen ausgeführt, die unter `"on_false"` stehen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ return audio_parts(execute_type_option(values, data), data)
[ "def", "option", "(", "values", ",", "data", ")", ":", "return", "audio_parts", "(", "execute_type_option", "(", "values", ",", "data", ")", ",", "data", ")" ]
[ 74, 0 ]
[ 85, 63 ]
null
python
de
['de', 'de', 'de']
True
true
null
Circle.minmax_from_center
(self, center)
return (dist_min, dist_max)
Die Funktion ermittelt den minimalen und maximalen Abstand vom Center
Die Funktion ermittelt den minimalen und maximalen Abstand vom Center
def minmax_from_center(self, center): """ Die Funktion ermittelt den minimalen und maximalen Abstand vom Center """ d = distance(center, self.center) if np.isclose(d, 0.0): return (self.radius, self.radius) dist_min = abs(d - self.radius) dist_max = d + self.radius return (dist_min, dist_max)
[ "def", "minmax_from_center", "(", "self", ",", "center", ")", ":", "d", "=", "distance", "(", "center", ",", "self", ".", "center", ")", "if", "np", ".", "isclose", "(", "d", ",", "0.0", ")", ":", "return", "(", "self", ".", "radius", ",", "self", ".", "radius", ")", "dist_min", "=", "abs", "(", "d", "-", "self", ".", "radius", ")", "dist_max", "=", "d", "+", "self", ".", "radius", "return", "(", "dist_min", ",", "dist_max", ")" ]
[ 266, 4 ]
[ 275, 35 ]
null
python
de
['de', 'de', 'de']
True
true
null
ImageTaggingCNN._input_conv_layer
(self, input, num_of_kernel)
return MaxPooling2D( pool_size=2, padding='same')(x)
Erzeugt ein Convolution Layer mit einem 7 x 7 Kernel einem Stride von 2 gefolgt von einem Max-Pooling :tensor|list input: Die Tensoren aus vorherigen Layer :int num_of_kernel: Die Anzahl der Kernel (Filter)
Erzeugt ein Convolution Layer mit einem 7 x 7 Kernel einem Stride von 2 gefolgt von einem Max-Pooling
def _input_conv_layer(self, input, num_of_kernel): """ Erzeugt ein Convolution Layer mit einem 7 x 7 Kernel einem Stride von 2 gefolgt von einem Max-Pooling :tensor|list input: Die Tensoren aus vorherigen Layer :int num_of_kernel: Die Anzahl der Kernel (Filter) """ x = ZeroPadding2D( padding=3)(input) x = Conv2D( filters=num_of_kernel, kernel_size=7, strides=2, padding='valid', kernel_initializer='VarianceScaling')(x) x = BatchNormalization( axis=3)(x) x = Activation('relu')(x) return MaxPooling2D( pool_size=2, padding='same')(x)
[ "def", "_input_conv_layer", "(", "self", ",", "input", ",", "num_of_kernel", ")", ":", "x", "=", "ZeroPadding2D", "(", "padding", "=", "3", ")", "(", "input", ")", "x", "=", "Conv2D", "(", "filters", "=", "num_of_kernel", ",", "kernel_size", "=", "7", ",", "strides", "=", "2", ",", "padding", "=", "'valid'", ",", "kernel_initializer", "=", "'VarianceScaling'", ")", "(", "x", ")", "x", "=", "BatchNormalization", "(", "axis", "=", "3", ")", "(", "x", ")", "x", "=", "Activation", "(", "'relu'", ")", "(", "x", ")", "return", "MaxPooling2D", "(", "pool_size", "=", "2", ",", "padding", "=", "'same'", ")", "(", "x", ")" ]
[ 15, 4 ]
[ 34, 30 ]
null
python
de
['de', 'de', 'de']
True
true
null
testBase.tearDown
(self)
wird nach jeden test aufgerufen Returns ------- None.
wird nach jeden test aufgerufen Returns ------- None.
def tearDown(self): ''' wird nach jeden test aufgerufen Returns ------- None. ''' #self.app. # close the browser window #self.driver.quit() pass
[ "def", "tearDown", "(", "self", ")", ":", "#self.app.", "# close the browser window", "#self.driver.quit()", "pass" ]
[ 134, 4 ]
[ 145, 12 ]
null
python
de
['de', 'de', 'de']
True
true
null
delete_resource
(path: str)
Löscht die übergebene Ressource. Verwendet :func:`get_resource_path`, um den Pfad der Ressource zu bekommen. Ist die Ressource nicht vorhanden, wird das ignoriert. Ist der angegebene Pfad allerdings ein Ordner, wird ein Fehler geworfen. :param path: Pfad zur Ressource, relativ zum `resources`-Ordner. :raises: OSError
Löscht die übergebene Ressource.
def delete_resource(path: str): """Löscht die übergebene Ressource. Verwendet :func:`get_resource_path`, um den Pfad der Ressource zu bekommen. Ist die Ressource nicht vorhanden, wird das ignoriert. Ist der angegebene Pfad allerdings ein Ordner, wird ein Fehler geworfen. :param path: Pfad zur Ressource, relativ zum `resources`-Ordner. :raises: OSError """ with contextlib.suppress(FileNotFoundError): os.remove(get_resource_path(path))
[ "def", "delete_resource", "(", "path", ":", "str", ")", ":", "with", "contextlib", ".", "suppress", "(", "FileNotFoundError", ")", ":", "os", ".", "remove", "(", "get_resource_path", "(", "path", ")", ")" ]
[ 320, 0 ]
[ 332, 42 ]
null
python
de
['de', 'de', 'de']
True
true
null
ariaDicomClass.prepareGQA
(self, imagedatas=[], year:int=0, withInfo=True, withResult=False, withDicomData:bool=False )
return data
Auswertung für GQA vorbereiten zusätzlich noch Ergebnisse aus der Datenbank einfügen Benötig config.GQA und config.units - units: ["Linac-1", "Linac-2"], - gqa : dict <testId>: dict <unit>: dict fields: int energyFields: int Parameters ---------- imagedatas : list, optional Auflistungen von Bildinformationen aus der Aria Datenbank. The default is []. year : int, optional DESCRIPTION. The default is 0. withInfo : TYPE, optional alle ImageInfos mit hinzufügen. The default is True. withResult : boolean, optional Testergebnisse mit ausgeben. The default is False. withDicomData : boolean, optional Info pro gerät in dicomfiles ablegen. The default is False. Returns ------- gqa : dict # alles aus config.gqa dabei die Unites mit Daten füllen <testname> info: inaktiv tip anleitung options: TODO: tolerance: <energy> <unit-n> fields: int energyFields: int energy: list
Auswertung für GQA vorbereiten zusätzlich noch Ergebnisse aus der Datenbank einfügen
def prepareGQA(self, imagedatas=[], year:int=0, withInfo=True, withResult=False, withDicomData:bool=False ): """Auswertung für GQA vorbereiten zusätzlich noch Ergebnisse aus der Datenbank einfügen Benötig config.GQA und config.units - units: ["Linac-1", "Linac-2"], - gqa : dict <testId>: dict <unit>: dict fields: int energyFields: int Parameters ---------- imagedatas : list, optional Auflistungen von Bildinformationen aus der Aria Datenbank. The default is []. year : int, optional DESCRIPTION. The default is 0. withInfo : TYPE, optional alle ImageInfos mit hinzufügen. The default is True. withResult : boolean, optional Testergebnisse mit ausgeben. The default is False. withDicomData : boolean, optional Info pro gerät in dicomfiles ablegen. The default is False. Returns ------- gqa : dict # alles aus config.gqa dabei die Unites mit Daten füllen <testname> info: inaktiv tip anleitung options: TODO: tolerance: <energy> <unit-n> fields: int energyFields: int energy: list """ # dicom gerät , name , infos self.dicomfiles = {} units = self.config.units # Dateien im Pfad pdfFiles = [] if osp.exists( self.variables["path"] ): p = Path( self.variables["path"] ) pdfFiles = [i.name for i in p.glob( '*.pdf' )] # files = os.listdir( self.variables["path"] ) data = { "GQA" : self.config.get("GQA").toDict(), "units" : units, "testTags" : {}, "testIds": {} } # nur das gesuchte Jahr, ohne index df_results = self.pd_results.gqa[ self.pd_results.gqa['year'] == year ].reset_index() result_fields = [ "acceptance", "group" ] if withResult: result_fields.append("data") # neuen index setzen # Das geht nur bei daten in df_results if len(df_results.index) > 0: df_results.set_index( df_results.apply(lambda x: f"{x['year']}|{x['unit']}|{x['test']}|{x['energy']}|{x['month']}", axis=1), inplace=True ) data["results"] = df_results[ result_fields ].to_dict( orient="split" ) else: data["results"] = { "columns":result_fields, "data":[], "index":[] } # tags und gqa ids bestimmen for testid, item in 
self.config.GQA.items(): if "tag" in item: data["testTags"][ item["tag"] ] = testid data["testIds"][ testid ] = item["tag"] tagNotFound = {} inactiv = [] testNotFound = [] for imagedata in imagedatas: # bereitetet die Datenbank Informationen auf info = self.getImageInfos( imagedata ) unit = info["unit"] energy = info["energy"] # # zusätzlich die Daten in self.dicomfiles ablegen # if withDicomData: if not unit in self.dicomfiles: self.dicomfiles[ unit ] = {} # zusätzlich in dicomfiles ablegen self.dicomfiles[ unit ][ info["id"] ] = info # Felder zuordnen, eine Aufnahme kann für mehrere tests verwendet werden # tag für die Datenbank, testid für das PDF for testTag in info["testTags"]: # nur wenn es auch einen test gibt if not testTag in data["testTags"]: tagNotFound[ testTag ] = testTag continue testId = data["testTags"][testTag] # ist der test in gqa nicht erlaubt überspringen # inaktive kann auch einen Text enthalten der beschreibt warum # FIXME: inaktive t = "GQA.{}.info.inaktiv".format( testId ) if not self.config.get(t, False) == False: inactiv.append( self.config.get(t) ) continue # gibt es in GQA passend zum Test dem Gerät und der Energie einen Eintrag t = "GQA.{}.{}.energyFields.{}".format( testId, unit, energy ) energyFields = self.config.get(t, False) if energyFields == False: testNotFound.append( t ) continue # Art des tests MT|JT tagArt = testId[0:2] if tagArt == "JT": dateFlag = "0" else: dateFlag = str( info["AcquisitionMonth"] ) # test_unit = data["GQA"][testId][unit] if not dateFlag in test_unit: test_unit[ dateFlag ] = {} if not energy in test_unit[ dateFlag ]: test_unit[ dateFlag ][energy] = { "counts": 0, "ready": False, "pdfName" : "", "pdf": False, "acceptance" : {} } # Anzahl der Felder für das Datumsflag der jeweiligen Energie erhöhen (counts) test_unit[ dateFlag ][ energy ][ "counts" ] += 1 # auf mid Anzahl prüfen if test_unit[ dateFlag ][ energy ][ "counts" ] >= energyFields: test_unit[ dateFlag ][ energy ][ "ready" ] = True # PDF Dateiname 
zusammenstellen pdfName = self.config.render_template( self.config["templates"][ "PDF-" + tagArt + "-filename"], { "AcquisitionYear": info["AcquisitionYear"], "AcquisitionMonth": info["AcquisitionMonth"], "unit": unit, "energy": energy, "testId": testId } ) if pdfName in pdfFiles: test_unit[ dateFlag ][ energy ][ "pdfName" ] = pdfName test_unit[ dateFlag ][ energy ][ "pdf" ] = True # nicht gefundene Tags data["inactiv"] = inactiv data["tagNotFound"] = tagNotFound data["testNotFound"] = testNotFound return data
[ "def", "prepareGQA", "(", "self", ",", "imagedatas", "=", "[", "]", ",", "year", ":", "int", "=", "0", ",", "withInfo", "=", "True", ",", "withResult", "=", "False", ",", "withDicomData", ":", "bool", "=", "False", ")", ":", "# dicom gerät , name , infos", "self", ".", "dicomfiles", "=", "{", "}", "units", "=", "self", ".", "config", ".", "units", "# Dateien im Pfad", "pdfFiles", "=", "[", "]", "if", "osp", ".", "exists", "(", "self", ".", "variables", "[", "\"path\"", "]", ")", ":", "p", "=", "Path", "(", "self", ".", "variables", "[", "\"path\"", "]", ")", "pdfFiles", "=", "[", "i", ".", "name", "for", "i", "in", "p", ".", "glob", "(", "'*.pdf'", ")", "]", "# files = os.listdir( self.variables[\"path\"] )", "data", "=", "{", "\"GQA\"", ":", "self", ".", "config", ".", "get", "(", "\"GQA\"", ")", ".", "toDict", "(", ")", ",", "\"units\"", ":", "units", ",", "\"testTags\"", ":", "{", "}", ",", "\"testIds\"", ":", "{", "}", "}", "# nur das gesuchte Jahr, ohne index", "df_results", "=", "self", ".", "pd_results", ".", "gqa", "[", "self", ".", "pd_results", ".", "gqa", "[", "'year'", "]", "==", "year", "]", ".", "reset_index", "(", ")", "result_fields", "=", "[", "\"acceptance\"", ",", "\"group\"", "]", "if", "withResult", ":", "result_fields", ".", "append", "(", "\"data\"", ")", "# neuen index setzen", "# Das geht nur bei daten in df_results", "if", "len", "(", "df_results", ".", "index", ")", ">", "0", ":", "df_results", ".", "set_index", "(", "df_results", ".", "apply", "(", "lambda", "x", ":", "f\"{x['year']}|{x['unit']}|{x['test']}|{x['energy']}|{x['month']}\"", ",", "axis", "=", "1", ")", ",", "inplace", "=", "True", ")", "data", "[", "\"results\"", "]", "=", "df_results", "[", "result_fields", "]", ".", "to_dict", "(", "orient", "=", "\"split\"", ")", "else", ":", "data", "[", "\"results\"", "]", "=", "{", "\"columns\"", ":", "result_fields", ",", "\"data\"", ":", "[", "]", ",", "\"index\"", ":", "[", "]", "}", "# tags und gqa ids bestimmen", "for", 
"testid", ",", "item", "in", "self", ".", "config", ".", "GQA", ".", "items", "(", ")", ":", "if", "\"tag\"", "in", "item", ":", "data", "[", "\"testTags\"", "]", "[", "item", "[", "\"tag\"", "]", "]", "=", "testid", "data", "[", "\"testIds\"", "]", "[", "testid", "]", "=", "item", "[", "\"tag\"", "]", "tagNotFound", "=", "{", "}", "inactiv", "=", "[", "]", "testNotFound", "=", "[", "]", "for", "imagedata", "in", "imagedatas", ":", "# bereitetet die Datenbank Informationen auf", "info", "=", "self", ".", "getImageInfos", "(", "imagedata", ")", "unit", "=", "info", "[", "\"unit\"", "]", "energy", "=", "info", "[", "\"energy\"", "]", "#", "# zusätzlich die Daten in self.dicomfiles ablegen", "#", "if", "withDicomData", ":", "if", "not", "unit", "in", "self", ".", "dicomfiles", ":", "self", ".", "dicomfiles", "[", "unit", "]", "=", "{", "}", "# zusätzlich in dicomfiles ablegen", "self", ".", "dicomfiles", "[", "unit", "]", "[", "info", "[", "\"id\"", "]", "]", "=", "info", "# Felder zuordnen, eine Aufnahme kann für mehrere tests verwendet werden", "# tag für die Datenbank, testid für das PDF", "for", "testTag", "in", "info", "[", "\"testTags\"", "]", ":", "# nur wenn es auch einen test gibt", "if", "not", "testTag", "in", "data", "[", "\"testTags\"", "]", ":", "tagNotFound", "[", "testTag", "]", "=", "testTag", "continue", "testId", "=", "data", "[", "\"testTags\"", "]", "[", "testTag", "]", "# ist der test in gqa nicht erlaubt überspringen", "# inaktive kann auch einen Text enthalten der beschreibt warum", "# FIXME: inaktive", "t", "=", "\"GQA.{}.info.inaktiv\"", ".", "format", "(", "testId", ")", "if", "not", "self", ".", "config", ".", "get", "(", "t", ",", "False", ")", "==", "False", ":", "inactiv", ".", "append", "(", "self", ".", "config", ".", "get", "(", "t", ")", ")", "continue", "# gibt es in GQA passend zum Test dem Gerät und der Energie einen Eintrag", "t", "=", "\"GQA.{}.{}.energyFields.{}\"", ".", "format", "(", "testId", ",", "unit", ",", "energy", ")", 
"energyFields", "=", "self", ".", "config", ".", "get", "(", "t", ",", "False", ")", "if", "energyFields", "==", "False", ":", "testNotFound", ".", "append", "(", "t", ")", "continue", "# Art des tests MT|JT", "tagArt", "=", "testId", "[", "0", ":", "2", "]", "if", "tagArt", "==", "\"JT\"", ":", "dateFlag", "=", "\"0\"", "else", ":", "dateFlag", "=", "str", "(", "info", "[", "\"AcquisitionMonth\"", "]", ")", "#", "test_unit", "=", "data", "[", "\"GQA\"", "]", "[", "testId", "]", "[", "unit", "]", "if", "not", "dateFlag", "in", "test_unit", ":", "test_unit", "[", "dateFlag", "]", "=", "{", "}", "if", "not", "energy", "in", "test_unit", "[", "dateFlag", "]", ":", "test_unit", "[", "dateFlag", "]", "[", "energy", "]", "=", "{", "\"counts\"", ":", "0", ",", "\"ready\"", ":", "False", ",", "\"pdfName\"", ":", "\"\"", ",", "\"pdf\"", ":", "False", ",", "\"acceptance\"", ":", "{", "}", "}", "# Anzahl der Felder für das Datumsflag der jeweiligen Energie erhöhen (counts)", "test_unit", "[", "dateFlag", "]", "[", "energy", "]", "[", "\"counts\"", "]", "+=", "1", "# auf mid Anzahl prüfen", "if", "test_unit", "[", "dateFlag", "]", "[", "energy", "]", "[", "\"counts\"", "]", ">=", "energyFields", ":", "test_unit", "[", "dateFlag", "]", "[", "energy", "]", "[", "\"ready\"", "]", "=", "True", "# PDF Dateiname zusammenstellen", "pdfName", "=", "self", ".", "config", ".", "render_template", "(", "self", ".", "config", "[", "\"templates\"", "]", "[", "\"PDF-\"", "+", "tagArt", "+", "\"-filename\"", "]", ",", "{", "\"AcquisitionYear\"", ":", "info", "[", "\"AcquisitionYear\"", "]", ",", "\"AcquisitionMonth\"", ":", "info", "[", "\"AcquisitionMonth\"", "]", ",", "\"unit\"", ":", "unit", ",", "\"energy\"", ":", "energy", ",", "\"testId\"", ":", "testId", "}", ")", "if", "pdfName", "in", "pdfFiles", ":", "test_unit", "[", "dateFlag", "]", "[", "energy", "]", "[", "\"pdfName\"", "]", "=", "pdfName", "test_unit", "[", "dateFlag", "]", "[", "energy", "]", "[", "\"pdf\"", "]", "=", "True", 
"# nicht gefundene Tags", "data", "[", "\"inactiv\"", "]", "=", "inactiv", "data", "[", "\"tagNotFound\"", "]", "=", "tagNotFound", "data", "[", "\"testNotFound\"", "]", "=", "testNotFound", "return", "data" ]
[ 201, 4 ]
[ 389, 19 ]
null
python
de
['de', 'de', 'de']
True
true
null
ariaDicomClass.getMatrix
( self, output_format="json", params:dict={} )
return style + html
Gibt eine Liste alle Testbeschreibungen (config) mit Anleitungen Parameters ---------- output_format: str Format der Ausgabe [ json, html ] params: dict Aufrufparameter mit year und month Returns ------- str|dict html matrix code oder dict.
Gibt eine Liste alle Testbeschreibungen (config) mit Anleitungen
def getMatrix( self, output_format="json", params:dict={} ): """Gibt eine Liste alle Testbeschreibungen (config) mit Anleitungen Parameters ---------- output_format: str Format der Ausgabe [ json, html ] params: dict Aufrufparameter mit year und month Returns ------- str|dict html matrix code oder dict. """ # jahr und Monat bei 0 mit dem aktuellen belegen today = date.today() if params["year"] == 0: params["year"] = today.year if params["month"] == 0: params["month"] = today.month # pdf wird zum laden der Texte verwendet from isp.mpdf import PdfGenerator as ispPdf pdf = ispPdf() html_jt = "" html_mt = "" html_nn = "" data_dict = {} for key, content in self.config.GQA.items(): data = { "key" : key, "tip" : "", "need" : "", "anleitung" : "", "chips" : "" } chips = [] # units und energy for unit_key, unit in self.config.units.items(): if unit in content: for energy in content[ unit ].energy: chips.append( { "class": "badge badge-pill badge-info mr-1", "content": "{} - {}".format( unit_key, energy ) } ) # info bestimmen info = content.info data["tip"] = info.get("tip", "") need = info.get("need", "") if type(need) == str and need != "": chips.append( { "class": "badge badge-pill badge-success", "content": 'benötigt: ' + need } ) # Anleitung anleitung_filename = info.get("anleitung", "") data["anleitung"] = '<p class="badge badge-pill badge-primary">Anleitung fehlt!</p>' if anleitung_filename != "": anleitung = pdf.textFile(anleitung_filename, render = False) if anleitung: data["anleitung"] = anleitung # Toleranz tolerance = content.info.get("tolerance", False) if tolerance: data["anleitung"] += "<h6>Toleranz</h6>" # ggf formel erstellen for e, item in tolerance.items(): self.prepare_tolerance(key, e) pass # toleranz einfügen data["anleitung"] += '<pre class="toleranz bg-light text-monospace ">' + json.dumps( tolerance, indent=2 ) + '</pre>' # ist der test als inaktiv Hinweis ausgeben inaktiv = content.info.get('inaktiv', False) if inaktiv != False: chips.append( { 
"class": "inaktiv", "content": 'Inaktiv: ' + inaktiv } ) # gibt es optional Angaben optional = content.info.get('optional', []) if len(optional) > 0: for item in optional: chips.append( { "class": "badge badge-pill badge-primary", "content": 'Optional wenn: ' + item + ' OK' } ) # TODO todo = content.info.get("TODO", False) if todo and len(todo) > 0: data["anleitung"] += "TODO" data["anleitung"] += '<pre class="p-1 bg-warning">' for t in todo: data["anleitung"] += "* " + t + "\n" data["anleitung"] += '</pre>' # markierungen zusammenstellen for chip in chips: data["chips"] += '<div class="{class}">{content}</div>'.format(**chip) data_dict[ key ] = content.toDict() data_dict[ key ][ "anleitung" ] = anleitung card = """ <div class="card m-3" > <div class="card-header"> <span class="font-weight-bolder">{key}</span> <span class="pl-3">{tip}</span> <div class="float-right">{chips}</div> </div> <div class="card-body p-1"> {anleitung} </div> </div> """.format( **data ) if key[0:2] == "JT": html_jt += card elif key[0:2] == "MT": html_mt += card else: html_nn += card if output_format == "json": return data_dict style = """ <style> /* Anpassung pdf text */ .gqa_matrix h2 { font-size: 1.1667em; font-weight: bold; line-height: 1.286em; margin-top: 0.5em; margin-bottom: 0.5em; } .gqa_matrix .card-body p::first-of-type { background-color: #FFFFFFAA; } </style> """ html = ''' <div class="gqa_matrix"> <h1 class="m-0 p-1 text-white bg-secondary" >Angaben für: {month}/{year}</h1> <content class="p-1 d-flex flex-row" > <div class="w-50">{jt}</div> <div class="w-50">{mt}</div> <div class="">{nn}</div> </content> </div> '''.format( jt=html_jt, mt=html_mt, nn=html_nn, **params ) return style + html
[ "def", "getMatrix", "(", "self", ",", "output_format", "=", "\"json\"", ",", "params", ":", "dict", "=", "{", "}", ")", ":", "# jahr und Monat bei 0 mit dem aktuellen belegen", "today", "=", "date", ".", "today", "(", ")", "if", "params", "[", "\"year\"", "]", "==", "0", ":", "params", "[", "\"year\"", "]", "=", "today", ".", "year", "if", "params", "[", "\"month\"", "]", "==", "0", ":", "params", "[", "\"month\"", "]", "=", "today", ".", "month", "# pdf wird zum laden der Texte verwendet", "from", "isp", ".", "mpdf", "import", "PdfGenerator", "as", "ispPdf", "pdf", "=", "ispPdf", "(", ")", "html_jt", "=", "\"\"", "html_mt", "=", "\"\"", "html_nn", "=", "\"\"", "data_dict", "=", "{", "}", "for", "key", ",", "content", "in", "self", ".", "config", ".", "GQA", ".", "items", "(", ")", ":", "data", "=", "{", "\"key\"", ":", "key", ",", "\"tip\"", ":", "\"\"", ",", "\"need\"", ":", "\"\"", ",", "\"anleitung\"", ":", "\"\"", ",", "\"chips\"", ":", "\"\"", "}", "chips", "=", "[", "]", "# units und energy", "for", "unit_key", ",", "unit", "in", "self", ".", "config", ".", "units", ".", "items", "(", ")", ":", "if", "unit", "in", "content", ":", "for", "energy", "in", "content", "[", "unit", "]", ".", "energy", ":", "chips", ".", "append", "(", "{", "\"class\"", ":", "\"badge badge-pill badge-info mr-1\"", ",", "\"content\"", ":", "\"{} - {}\"", ".", "format", "(", "unit_key", ",", "energy", ")", "}", ")", "# info bestimmen", "info", "=", "content", ".", "info", "data", "[", "\"tip\"", "]", "=", "info", ".", "get", "(", "\"tip\"", ",", "\"\"", ")", "need", "=", "info", ".", "get", "(", "\"need\"", ",", "\"\"", ")", "if", "type", "(", "need", ")", "==", "str", "and", "need", "!=", "\"\"", ":", "chips", ".", "append", "(", "{", "\"class\"", ":", "\"badge badge-pill badge-success\"", ",", "\"content\"", ":", "'benötigt: ' ", " ", "eed ", " ", "", "# Anleitung", "anleitung_filename", "=", "info", ".", "get", "(", "\"anleitung\"", ",", "\"\"", ")", "data", "[", 
"\"anleitung\"", "]", "=", "'<p class=\"badge badge-pill badge-primary\">Anleitung fehlt!</p>'", "if", "anleitung_filename", "!=", "\"\"", ":", "anleitung", "=", "pdf", ".", "textFile", "(", "anleitung_filename", ",", "render", "=", "False", ")", "if", "anleitung", ":", "data", "[", "\"anleitung\"", "]", "=", "anleitung", "# Toleranz", "tolerance", "=", "content", ".", "info", ".", "get", "(", "\"tolerance\"", ",", "False", ")", "if", "tolerance", ":", "data", "[", "\"anleitung\"", "]", "+=", "\"<h6>Toleranz</h6>\"", "# ggf formel erstellen", "for", "e", ",", "item", "in", "tolerance", ".", "items", "(", ")", ":", "self", ".", "prepare_tolerance", "(", "key", ",", "e", ")", "pass", "# toleranz einfügen", "data", "[", "\"anleitung\"", "]", "+=", "'<pre class=\"toleranz bg-light text-monospace \">'", "+", "json", ".", "dumps", "(", "tolerance", ",", "indent", "=", "2", ")", "+", "'</pre>'", "# ist der test als inaktiv Hinweis ausgeben", "inaktiv", "=", "content", ".", "info", ".", "get", "(", "'inaktiv'", ",", "False", ")", "if", "inaktiv", "!=", "False", ":", "chips", ".", "append", "(", "{", "\"class\"", ":", "\"inaktiv\"", ",", "\"content\"", ":", "'Inaktiv: '", "+", "inaktiv", "}", ")", "# gibt es optional Angaben", "optional", "=", "content", ".", "info", ".", "get", "(", "'optional'", ",", "[", "]", ")", "if", "len", "(", "optional", ")", ">", "0", ":", "for", "item", "in", "optional", ":", "chips", ".", "append", "(", "{", "\"class\"", ":", "\"badge badge-pill badge-primary\"", ",", "\"content\"", ":", "'Optional wenn: '", "+", "item", "+", "' OK'", "}", ")", "# TODO", "todo", "=", "content", ".", "info", ".", "get", "(", "\"TODO\"", ",", "False", ")", "if", "todo", "and", "len", "(", "todo", ")", ">", "0", ":", "data", "[", "\"anleitung\"", "]", "+=", "\"TODO\"", "data", "[", "\"anleitung\"", "]", "+=", "'<pre class=\"p-1 bg-warning\">'", "for", "t", "in", "todo", ":", "data", "[", "\"anleitung\"", "]", "+=", "\"* \"", "+", "t", "+", "\"\\n\"", "data", "[", 
"\"anleitung\"", "]", "+=", "'</pre>'", "# markierungen zusammenstellen", "for", "chip", "in", "chips", ":", "data", "[", "\"chips\"", "]", "+=", "'<div class=\"{class}\">{content}</div>'", ".", "format", "(", "*", "*", "chip", ")", "data_dict", "[", "key", "]", "=", "content", ".", "toDict", "(", ")", "data_dict", "[", "key", "]", "[", "\"anleitung\"", "]", "=", "anleitung", "card", "=", "\"\"\"\n <div class=\"card m-3\" >\n <div class=\"card-header\">\n <span class=\"font-weight-bolder\">{key}</span>\n <span class=\"pl-3\">{tip}</span>\n <div class=\"float-right\">{chips}</div>\n </div>\n <div class=\"card-body p-1\">\n {anleitung}\n </div>\n </div>\n \"\"\"", ".", "format", "(", "*", "*", "data", ")", "if", "key", "[", "0", ":", "2", "]", "==", "\"JT\"", ":", "html_jt", "+=", "card", "elif", "key", "[", "0", ":", "2", "]", "==", "\"MT\"", ":", "html_mt", "+=", "card", "else", ":", "html_nn", "+=", "card", "if", "output_format", "==", "\"json\"", ":", "return", "data_dict", "style", "=", "\"\"\"\n <style>\n /* Anpassung pdf text */\n .gqa_matrix h2 {\n font-size: 1.1667em;\n font-weight: bold;\n line-height: 1.286em;\n margin-top: 0.5em;\n margin-bottom: 0.5em;\n }\n .gqa_matrix .card-body p::first-of-type {\n background-color: #FFFFFFAA;\n }\n </style>\n \"\"\"", "html", "=", "'''\n <div class=\"gqa_matrix\">\n <h1 class=\"m-0 p-1 text-white bg-secondary\" >Angaben für: {month}/{year}</h1>\n <content class=\"p-1 d-flex flex-row\" >\n <div class=\"w-50\">{jt}</div>\n <div class=\"w-50\">{mt}</div>\n <div class=\"\">{nn}</div>\n </content>\n </div>\n '''", ".", "format", "(", "jt", "=", "html_jt", ",", "mt", "=", "html_mt", ",", "nn", "=", "html_nn", ",", "*", "*", "params", ")", "return", "style", "+", "html" ]
[ 520, 4 ]
[ 675, 27 ]
null
python
de
['de', 'de', 'de']
True
true
null
RawData.write_LastSubgifterToDataLog
( self )
return "Kein Subgifter - keine Daten zum Schreiben."
Letzten Subgifter in ein Logfile schreiben
Letzten Subgifter in ein Logfile schreiben
def write_LastSubgifterToDataLog( self ): ''' Letzten Subgifter in ein Logfile schreiben ''' # Daten nur Schreiben, wenn ein LogFile-Verzeichnis angegeben wurde und # nicht, wenn noch kein Subgift in dieser Session vergeben wurde if self.LogFilesPath and not (self.LastSubGifterCounter == 0): tmpText = "User = {0} ( Sub-Gifts = {1} )".format( self.LastSubGifterUserName, self.LastSubGifterCounter ) text = str( '[' + myTime.TimeStampLog() + '] : ' + str( tmpText ) ) # Daten nur Schreiben, wenn des Log-Files-Verzeichnis angegeben wurde if self.LogFilesPath: AppendDataToFile( self.SubGifterLogFile, text ) return "Daten im Logfile eingetragen." return "Kein Subgifter - keine Daten zum Schreiben."
[ "def", "write_LastSubgifterToDataLog", "(", "self", ")", ":", "# Daten nur Schreiben, wenn ein LogFile-Verzeichnis angegeben wurde und\r", "# nicht, wenn noch kein Subgift in dieser Session vergeben wurde\r", "if", "self", ".", "LogFilesPath", "and", "not", "(", "self", ".", "LastSubGifterCounter", "==", "0", ")", ":", "tmpText", "=", "\"User = {0} ( Sub-Gifts = {1} )\"", ".", "format", "(", "self", ".", "LastSubGifterUserName", ",", "self", ".", "LastSubGifterCounter", ")", "text", "=", "str", "(", "'['", "+", "myTime", ".", "TimeStampLog", "(", ")", "+", "'] : '", "+", "str", "(", "tmpText", ")", ")", "# Daten nur Schreiben, wenn des Log-Files-Verzeichnis angegeben wurde\r", "if", "self", ".", "LogFilesPath", ":", "AppendDataToFile", "(", "self", ".", "SubGifterLogFile", ",", "text", ")", "return", "\"Daten im Logfile eingetragen.\"", "return", "\"Kein Subgifter - keine Daten zum Schreiben.\"" ]
[ 125, 4 ]
[ 141, 60 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispTest.test_webapp_base_system
( self )
Webapp Aufruf auf system funktionen
Webapp Aufruf auf system funktionen
def test_webapp_base_system( self ): ''' Webapp Aufruf auf system funktionen ''' response = self.app.get( "api/system" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") response = self.app.get( "api/system", query_string = { "format" : "html" } ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") response = self.app.get( "api/system/test", query_string = { "zahl" : 12 } ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertDictEqual( response.json["data"], { "_ispcp": {}, "bool": False, "text": "typenlos", "zahl": 12.0}, "Response data nicht OK" ) response = self.app.get( "api/system/15" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertDictEqual( response.json["data"]["kwargs"], {'format': 'html', 'info': 'kwargs', 'systemId': '15'}, "Response data nicht OK" )
[ "def", "test_webapp_base_system", "(", "self", ")", ":", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/system\"", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/system\"", ",", "query_string", "=", "{", "\"format\"", ":", "\"html\"", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/system/test\"", ",", "query_string", "=", "{", "\"zahl\"", ":", "12", "}", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "self", ".", "assertDictEqual", "(", "response", ".", "json", "[", "\"data\"", "]", ",", "{", "\"_ispcp\"", ":", "{", "}", ",", "\"bool\"", ":", "False", ",", "\"text\"", ":", "\"typenlos\"", ",", "\"zahl\"", ":", "12.0", "}", ",", "\"Response data nicht OK\"", ")", "response", "=", "self", ".", "app", ".", "get", "(", "\"api/system/15\"", ")", "self", ".", "assertEqual", "(", "response", ".", "status_code", ",", "200", ",", "\"Api Status nicht 200\"", ")", "self", ".", "assertDictEqual", "(", "response", ".", "json", "[", "\"data\"", "]", "[", "\"kwargs\"", "]", ",", "{", "'format'", ":", "'html'", ",", "'info'", ":", "'kwargs'", ",", "'systemId'", ":", "'15'", "}", ",", "\"Response data nicht OK\"", ")" ]
[ 1398, 4 ]
[ 1422, 9 ]
null
python
de
['de', 'de', 'de']
True
true
null
HeistSystem.RandomMessage_ByType
(self, messageType)
return
Auslesen einer zufälligen Nachricht aus der Datenbank
Auslesen einer zufälligen Nachricht aus der Datenbank
def RandomMessage_ByType(self, messageType): ''' Auslesen einer zufälligen Nachricht aus der Datenbank ''' thisActionName = "RandomMessage_ByType" # Nachrichten aus der Datenbank auslesen messagesList = self.DB_get_MessageText( messageType=messageType ) # Die Liste enthält Nachrichten-Texte if messagesList: # Liste durchmischen random.shuffle(messagesList) # Anzahl der Listenelemente bestimmen listLength = int(len(messagesList)) # Nachrichten-Text übernehmen message = messagesList[self.Parent.GetRandom(0, listLength)] # Rückgabe der zufälligen Nachricht an aufrufende Funktion return message return
[ "def", "RandomMessage_ByType", "(", "self", ",", "messageType", ")", ":", "thisActionName", "=", "\"RandomMessage_ByType\"", "# Nachrichten aus der Datenbank auslesen\r", "messagesList", "=", "self", ".", "DB_get_MessageText", "(", "messageType", "=", "messageType", ")", "# Die Liste enthält Nachrichten-Texte\r", "if", "messagesList", ":", "# Liste durchmischen\r", "random", ".", "shuffle", "(", "messagesList", ")", "# Anzahl der Listenelemente bestimmen\r", "listLength", "=", "int", "(", "len", "(", "messagesList", ")", ")", "# Nachrichten-Text übernehmen\r", "message", "=", "messagesList", "[", "self", ".", "Parent", ".", "GetRandom", "(", "0", ",", "listLength", ")", "]", "# Rückgabe der zufälligen Nachricht an aufrufende Funktion\r", "return", "message", "return" ]
[ 1042, 4 ]
[ 1064, 14 ]
null
python
de
['de', 'de', 'de']
True
true
null
Pipeline.end_time
(self)
return self.__end_time
float: Endzeit der Pipeline. Wird erst nach Beendigung der Pipeline initialisiert.
float: Endzeit der Pipeline. Wird erst nach Beendigung der Pipeline initialisiert.
def end_time(self): """float: Endzeit der Pipeline. Wird erst nach Beendigung der Pipeline initialisiert.""" return self.__end_time
[ "def", "end_time", "(", "self", ")", ":", "return", "self", ".", "__end_time" ]
[ 73, 4 ]
[ 75, 30 ]
null
python
de
['de', 'de', 'de']
True
true
null
steuern
(einkommen)
return steuer
Berechnung der zu zahlenden Steuern fuer ein zu versteuerndes Einkommen von x
Berechnung der zu zahlenden Steuern fuer ein zu versteuerndes Einkommen von x
def steuern(einkommen): """Berechnung der zu zahlenden Steuern fuer ein zu versteuerndes Einkommen von x""" if einkommen <= 8004: steuer = 0 elif einkommen <= 13469: y = (einkommen -8004.0)/10000.0 steuer = (912.17 * y + 1400)*y else: steuer = einkommen * 0.44 - 15694 return steuer
[ "def", "steuern", "(", "einkommen", ")", ":", "if", "einkommen", "<=", "8004", ":", "steuer", "=", "0", "elif", "einkommen", "<=", "13469", ":", "y", "=", "(", "einkommen", "-", "8004.0", ")", "/", "10000.0", "steuer", "=", "(", "912.17", "*", "y", "+", "1400", ")", "*", "y", "else", ":", "steuer", "=", "einkommen", "*", "0.44", "-", "15694", "return", "steuer" ]
[ 38, 0 ]
[ 47, 17 ]
null
python
de
['de', 'de', 'de']
True
true
null
_bezierKurveNFachTeilen
(bezier, n)
Bezierkurve bezier in n gleichlange Abschnitte teilen
Bezierkurve bezier in n gleichlange Abschnitte teilen
def _bezierKurveNFachTeilen(bezier, n): '''Bezierkurve bezier in n gleichlange Abschnitte teilen''' if (n <= 1): return [bezier] else: # beim ersten n-tel aufteilen b1, b2 = _bezierKurveAnPunktTeilen(bezier, 1.0/n) # rest (n-1)-mal aufteilen liste = [b1] liste.extend(_bezierKurveNFachTeilen(b2, n-1)) return liste
[ "def", "_bezierKurveNFachTeilen", "(", "bezier", ",", "n", ")", ":", "if", "(", "n", "<=", "1", ")", ":", "return", "[", "bezier", "]", "else", ":", "# beim ersten n-tel aufteilen", "b1", ",", "b2", "=", "_bezierKurveAnPunktTeilen", "(", "bezier", ",", "1.0", "/", "n", ")", "# rest (n-1)-mal aufteilen", "liste", "=", "[", "b1", "]", "liste", ".", "extend", "(", "_bezierKurveNFachTeilen", "(", "b2", ",", "n", "-", "1", ")", ")", "return", "liste" ]
[ 32, 0 ]
[ 42, 20 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispSAFRS.filter
(cls, filter )
return cls._int_filter( cls.query, filter )
Filterangabe im rql format ausgewerten. immer wenn filter= angegeben wurde wird diese Funktion aufgerufen es wird eine komplexe Filterangabe im rql format ausgewertet Parameters ---------- filter : str RQL Querystring Returns ------- query die query mit zusätzlichem Filter
Filterangabe im rql format ausgewerten.
def filter(cls, filter ): """Filterangabe im rql format ausgewerten. immer wenn filter= angegeben wurde wird diese Funktion aufgerufen es wird eine komplexe Filterangabe im rql format ausgewertet Parameters ---------- filter : str RQL Querystring Returns ------- query die query mit zusätzlichem Filter """ # interne Filterfunktion aufrufen return cls._int_filter( cls.query, filter )
[ "def", "filter", "(", "cls", ",", "filter", ")", ":", "# interne Filterfunktion aufrufen", "return", "cls", ".", "_int_filter", "(", "cls", ".", "query", ",", "filter", ")" ]
[ 1015, 4 ]
[ 1033, 51 ]
null
python
de
['de', 'de', 'de']
True
true
null
normalize_words
(values: dict, data: StepData)
Wörter, die öfter vorkommen und unterschiedliche cases besitzen, werden normalisiert. Eine Liste wird durchlaufen und jedes Wort welches bei zweiten Vorkommen anders geschrieben wurde als das erste vorgekommene wird dann so ersetzt, dass es so geschrieben wird wie das zuerst vorgekommene. Z.B. Bundesliga und bundesliga. Aus bundesliga wird Bundesliga. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return:
Wörter, die öfter vorkommen und unterschiedliche cases besitzen, werden normalisiert.
def normalize_words(values: dict, data: StepData): """Wörter, die öfter vorkommen und unterschiedliche cases besitzen, werden normalisiert. Eine Liste wird durchlaufen und jedes Wort welches bei zweiten Vorkommen anders geschrieben wurde als das erste vorgekommene wird dann so ersetzt, dass es so geschrieben wird wie das zuerst vorgekommene. Z.B. Bundesliga und bundesliga. Aus bundesliga wird Bundesliga. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return: """ for idx, key in data.loop_key(values["keys"], values): value = data.get_data(key, values) new_key = get_new_keys(values, idx) already_there = [] new_value = [] for each in value: if each.upper() in already_there: new_value.append(each.upper()) elif each.lower() in already_there: new_value.append(each.lower()) elif each.capitalize() in already_there: new_value.append(each.capitalize()) else: already_there.append(each) new_value.append(each) data.insert_data(new_key, new_value, values)
[ "def", "normalize_words", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "value", "=", "data", ".", "get_data", "(", "key", ",", "values", ")", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "already_there", "=", "[", "]", "new_value", "=", "[", "]", "for", "each", "in", "value", ":", "if", "each", ".", "upper", "(", ")", "in", "already_there", ":", "new_value", ".", "append", "(", "each", ".", "upper", "(", ")", ")", "elif", "each", ".", "lower", "(", ")", "in", "already_there", ":", "new_value", ".", "append", "(", "each", ".", "lower", "(", ")", ")", "elif", "each", ".", "capitalize", "(", ")", "in", "already_there", ":", "new_value", ".", "append", "(", "each", ".", "capitalize", "(", ")", ")", "else", ":", "already_there", ".", "append", "(", "each", ")", "new_value", ".", "append", "(", "each", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")" ]
[ 717, 0 ]
[ 744, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
Pipeline.current_step
(self)
return self.__current_step
int: Aktueller Step der pipeline. Wird erst nach Beendigung der Pipeline initialisiert.
int: Aktueller Step der pipeline. Wird erst nach Beendigung der Pipeline initialisiert.
def current_step(self): """int: Aktueller Step der pipeline. Wird erst nach Beendigung der Pipeline initialisiert.""" return self.__current_step
[ "def", "current_step", "(", "self", ")", ":", "return", "self", ".", "__current_step" ]
[ 83, 4 ]
[ 85, 34 ]
null
python
de
['de', 'de', 'de']
True
true
null
owner.setrank
(self, ctx, member: discord.Member=None, *rankName: str)
Teamler einen Upprank geben Beispiel: ----------- :setrole @Der-Eddy#6508 Member
Teamler einen Upprank geben Beispiel: ----------- :setrole
async def setrank(self, ctx, member: discord.Member=None, *rankName: str): '''Teamler einen Upprank geben Beispiel: ----------- :setrole @Der-Eddy#6508 Member ''' rank = discord.utils.get(ctx.guild.roles, name=' '.join(rankName)) await ctx.message.delete() if member is not None: await member.add_roles(rank) await ctx.send(f':white_check_mark: Teamler **{member.name}** wurde auf **{rank.name}** Uppranked') else: await ctx.send(':no_entry: Du musst einen Benutzer angeben!')
[ "async", "def", "setrank", "(", "self", ",", "ctx", ",", "member", ":", "discord", ".", "Member", "=", "None", ",", "*", "rankName", ":", "str", ")", ":", "rank", "=", "discord", ".", "utils", ".", "get", "(", "ctx", ".", "guild", ".", "roles", ",", "name", "=", "' '", ".", "join", "(", "rankName", ")", ")", "await", "ctx", ".", "message", ".", "delete", "(", ")", "if", "member", "is", "not", "None", ":", "await", "member", ".", "add_roles", "(", "rank", ")", "await", "ctx", ".", "send", "(", "f':white_check_mark: Teamler **{member.name}** wurde auf **{rank.name}** Uppranked'", ")", "else", ":", "await", "ctx", ".", "send", "(", "':no_entry: Du musst einen Benutzer angeben!'", ")" ]
[ 313, 4 ]
[ 325, 73 ]
null
python
de
['de', 'de', 'de']
True
true
null
compare
(values: dict, data: StepData)
Vergleicht zwei Werte miteinander und führt je nachdem ob =, !=, < oder > die "transform"-Typen aus. Wenn `value_left` gleich `value_right`, führe "transform"-Typen aus on_equal durch. Wenn `value_left` ungleich `value_right`, führe "transform"-Typen aus on_not_equal durch. Wenn `value_left` größer `value_right`, führe "transform"-Typen aus on_higher durch. Wenn `value_left` kleiner `value_right`, führe "transform"-Typen aus on_lower durch. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return:
Vergleicht zwei Werte miteinander und führt je nachdem ob =, !=, < oder > die "transform"-Typen aus.
def compare(values: dict, data: StepData): """Vergleicht zwei Werte miteinander und führt je nachdem ob =, !=, < oder > die "transform"-Typen aus. Wenn `value_left` gleich `value_right`, führe "transform"-Typen aus on_equal durch. Wenn `value_left` ungleich `value_right`, führe "transform"-Typen aus on_not_equal durch. Wenn `value_left` größer `value_right`, führe "transform"-Typen aus on_higher durch. Wenn `value_left` kleiner `value_right`, führe "transform"-Typen aus on_lower durch. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return: """ values["transform"] = execute_type_compare(values, data) transform(values, data)
[ "def", "compare", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "values", "[", "\"transform\"", "]", "=", "execute_type_compare", "(", "values", ",", "data", ")", "transform", "(", "values", ",", "data", ")" ]
[ 458, 0 ]
[ 472, 27 ]
null
python
de
['de', 'de', 'de']
True
true
null
StepData.loop_array
(self, loop_root: list, values: dict)
return map(lambda value: self.save_loop(value[0], value[1], values), enumerate(loop_root))
Zum Durchlaufen eines Arrays. Setzt bei jedem Durchlauf die Variablen `_loop` und `_idx`. `_loop` entspricht dem aktuellen Wert und `_idx` dem aktuellen Index. :param loop_root: Array, das durchlaufen werden soll. :param values: Werte aus der JSON-Datei :return: Iterator über das Array, welcher Seiteneffekte besitzt, mit (idx, value). :rtype: map
Zum Durchlaufen eines Arrays.
def loop_array(self, loop_root: list, values: dict): """ Zum Durchlaufen eines Arrays. Setzt bei jedem Durchlauf die Variablen `_loop` und `_idx`. `_loop` entspricht dem aktuellen Wert und `_idx` dem aktuellen Index. :param loop_root: Array, das durchlaufen werden soll. :param values: Werte aus der JSON-Datei :return: Iterator über das Array, welcher Seiteneffekte besitzt, mit (idx, value). :rtype: map """ return map(lambda value: self.save_loop(value[0], value[1], values), enumerate(loop_root))
[ "def", "loop_array", "(", "self", ",", "loop_root", ":", "list", ",", "values", ":", "dict", ")", ":", "return", "map", "(", "lambda", "value", ":", "self", ".", "save_loop", "(", "value", "[", "0", "]", ",", "value", "[", "1", "]", ",", "values", ")", ",", "enumerate", "(", "loop_root", ")", ")" ]
[ 60, 4 ]
[ 71, 98 ]
null
python
de
['de', 'de', 'de']
True
true
null
FSImage.__init__
(self, pathOrData=None, **kwargs )
Erweitert PFDicomImage um die eigene DicomImage Klasse
Erweitert PFDicomImage um die eigene DicomImage Klasse
def __init__(self, pathOrData=None, **kwargs ): """ Erweitert PFDicomImage um die eigene DicomImage Klasse """ # die eigene Erweiterung DicomImage.__init__( self, pathOrData )
[ "def", "__init__", "(", "self", ",", "pathOrData", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# die eigene Erweiterung", "DicomImage", ".", "__init__", "(", "self", ",", "pathOrData", ")" ]
[ 54, 4 ]
[ 59, 47 ]
null
python
de
['de', 'de', 'de']
True
true
null
GUI.setzen_analoge_eingaenge_1
(self, eintrag_neu)
Analoge Eingänge setzen - Testen der Eingabe
Analoge Eingänge setzen - Testen der Eingabe
def setzen_analoge_eingaenge_1(self, eintrag_neu): """ Analoge Eingänge setzen - Testen der Eingabe""" try: float(eintrag_neu) self.setzen_analoge_eingaenge_2() return True except ValueError: messagebox.showerror(message="Kein gültiger Analogwert!", title="SimSTB Fehlermeldung") self.aktualisieren_eingangswerte() return False
[ "def", "setzen_analoge_eingaenge_1", "(", "self", ",", "eintrag_neu", ")", ":", "try", ":", "float", "(", "eintrag_neu", ")", "self", ".", "setzen_analoge_eingaenge_2", "(", ")", "return", "True", "except", "ValueError", ":", "messagebox", ".", "showerror", "(", "message", "=", "\"Kein gültiger Analogwert!\",", " ", "itle=", "\"", "SimSTB Fehlermeldung\")", "", "self", ".", "aktualisieren_eingangswerte", "(", ")", "return", "False" ]
[ 237, 4 ]
[ 246, 24 ]
null
python
de
['de', 'de', 'de']
True
true
null
length
(values: dict, data: StepData)
Gibt die Länge von Arrays (Listen), Strings, Tupeln und Dictionaries aus. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return:
Gibt die Länge von Arrays (Listen), Strings, Tupeln und Dictionaries aus.
def length(values: dict, data: StepData): """Gibt die Länge von Arrays (Listen), Strings, Tupeln und Dictionaries aus. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return: """ for idx, key in data.loop_key(values["keys"], values): value = data.get_data(key, values) new_key = get_new_keys(values, idx) data.insert_data(new_key, len(value), values)
[ "def", "length", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "value", "=", "data", ".", "get_data", "(", "key", ",", "values", ")", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "data", ".", "insert_data", "(", "new_key", ",", "len", "(", "value", ")", ",", "values", ")" ]
[ 619, 0 ]
[ 630, 53 ]
null
python
de
['de', 'de', 'de']
True
true
null
select
(values: dict, data: StepData)
Entfernt alle Keys, die nicht in `"relevant_keys"` stehen aus dem Dictionary. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Entfernt alle Keys, die nicht in `"relevant_keys"` stehen aus dem Dictionary.
def select(values: dict, data: StepData): """Entfernt alle Keys, die nicht in `"relevant_keys"` stehen aus dem Dictionary. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ root = values.get("_loop_states", {}).get("_loop", None) if root is None: # If root is data root old_root = dict(data.data) data.clear_data() root = data.data else: old_root = dict(root) root.clear() for key in values["relevant_keys"]: try: data_insert_pattern(key, root, data_get_pattern(key, old_root)) except: if values.get("ignore_errors", False) is False: raise
[ "def", "select", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "root", "=", "values", ".", "get", "(", "\"_loop_states\"", ",", "{", "}", ")", ".", "get", "(", "\"_loop\"", ",", "None", ")", "if", "root", "is", "None", ":", "# If root is data root", "old_root", "=", "dict", "(", "data", ".", "data", ")", "data", ".", "clear_data", "(", ")", "root", "=", "data", ".", "data", "else", ":", "old_root", "=", "dict", "(", "root", ")", "root", ".", "clear", "(", ")", "for", "key", "in", "values", "[", "\"relevant_keys\"", "]", ":", "try", ":", "data_insert_pattern", "(", "key", ",", "root", ",", "data_get_pattern", "(", "key", ",", "old_root", ")", ")", "except", ":", "if", "values", ".", "get", "(", "\"ignore_errors\"", ",", "False", ")", "is", "False", ":", "raise" ]
[ 86, 0 ]
[ 108, 21 ]
null
python
de
['de', 'de', 'de']
True
true
null
RawData.write_RawDataLog
( self, data )
return
Schreibt die Raw-Daten in ein Logfile
Schreibt die Raw-Daten in ein Logfile
def write_RawDataLog( self, data ): ''' Schreibt die Raw-Daten in ein Logfile ''' thisActionName = "write_RawDataLog" # Daten nur Schreiben, wenn des Log-Files-Verzeichnis angegeben wurde if self.LogFilesPath: # Alles ins Log ausser "Ping"- und "Pong"-Nachrichten if ( ( not "PING" in data.RawData ) and ( not "PONG" in data.RawData ) and ( not "JOIN" in data.RawData ) and ( not "PART" in data.RawData ) ): text = str( '[' + myTime.TimeStampLog() + '] : ' + str( data.RawData ) ) # Schreibe Daten in Logfile AppendDataToFile( self.RawLogFile, text ) return
[ "def", "write_RawDataLog", "(", "self", ",", "data", ")", ":", "thisActionName", "=", "\"write_RawDataLog\"", "# Daten nur Schreiben, wenn des Log-Files-Verzeichnis angegeben wurde\r", "if", "self", ".", "LogFilesPath", ":", "# Alles ins Log ausser \"Ping\"- und \"Pong\"-Nachrichten\r", "if", "(", "(", "not", "\"PING\"", "in", "data", ".", "RawData", ")", "and", "(", "not", "\"PONG\"", "in", "data", ".", "RawData", ")", "and", "(", "not", "\"JOIN\"", "in", "data", ".", "RawData", ")", "and", "(", "not", "\"PART\"", "in", "data", ".", "RawData", ")", ")", ":", "text", "=", "str", "(", "'['", "+", "myTime", ".", "TimeStampLog", "(", ")", "+", "'] : '", "+", "str", "(", "data", ".", "RawData", ")", ")", "# Schreibe Daten in Logfile\r", "AppendDataToFile", "(", "self", ".", "RawLogFile", ",", "text", ")", "return" ]
[ 90, 4 ]
[ 105, 14 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispBase.check_acceptance_ext
( self, df, md={}, check:list=[], withSoll=False )
return df, acceptance
Überprüft die angegebenen Felder auf die Erfüllung der Toleranz Angaben Erweitert das Dataframe um <field>_acceptance und <field>_passed (Icon) gibt das gesamt Ergebnis zurück Parameters ---------- df : DataFrame Pandas DataFrame mit den in 'field' angegebenen Feldern md : dict metadata mit tolerance angaben für die Energie in md["energy"] check : list with dict field: str Name des Feldes in md tolerance: str Bezeichner des toleranz Bereichs in md.current.tolerance wird tolerance nicht angegeben wird default verwendet query: str Filter für Felder bei denen der check angewandt wird withSoll: boolean true - zusätzlich value einfügen wenn in toleranz angegeben Returns ------- df : DataFrame entweder das übergebene oder das neu erstellte bei angabe von query int : acceptance aller Felder 5-ok, 3-warn, 1-error 0-bei nan
Überprüft die angegebenen Felder auf die Erfüllung der Toleranz Angaben Erweitert das Dataframe um <field>_acceptance und <field>_passed (Icon) gibt das gesamt Ergebnis zurück
def check_acceptance_ext( self, df, md={}, check:list=[], withSoll=False ): """Überprüft die angegebenen Felder auf die Erfüllung der Toleranz Angaben Erweitert das Dataframe um <field>_acceptance und <field>_passed (Icon) gibt das gesamt Ergebnis zurück Parameters ---------- df : DataFrame Pandas DataFrame mit den in 'field' angegebenen Feldern md : dict metadata mit tolerance angaben für die Energie in md["energy"] check : list with dict field: str Name des Feldes in md tolerance: str Bezeichner des toleranz Bereichs in md.current.tolerance wird tolerance nicht angegeben wird default verwendet query: str Filter für Felder bei denen der check angewandt wird withSoll: boolean true - zusätzlich value einfügen wenn in toleranz angegeben Returns ------- df : DataFrame entweder das übergebene oder das neu erstellte bei angabe von query int : acceptance aller Felder 5-ok, 3-warn, 1-error 0-bei nan """ # Felder die für einen gesammelten check verwendet werden fullCheck = {} # dataframes und sammlung von dataframes bei query angabe dfs = [] for ci in check: # bei query Angabe nur passende verwenden if "query" in ci: query = ci['query'] qdf = df.query( query ) qdf[ "_query" ] = query else: qdf = df if not "field" in ci: # in der liste der dataframes anfügen dfs.append( qdf ) continue tolerance = "default" if "tolerance" in ci: tolerance = ci['tolerance'] if withSoll: try: sollValue = md.current.tolerance.get( tolerance ).get("soll").get("value", np.nan) except: sollValue = np.nan if sollValue: df[ ci['field'] + "_soll" ] = sollValue # ein zusätzliches Feld <field>_acceptance anlegen und füllen # apply calls check_tolerance with args on each row (axis=1) qdf[ ci['field'] + "_acceptance" ] = np.nan qdf = qdf.apply( lambda r: self.check_tolerance_ext( r, ci, md.current.tolerance, tolerance ), axis=1, result_type='expand' ) # und ein zusätzliches feld <field>_passed anlegen und füllen qdf[ ci['field'] + "_passed" ] = qdf[ ci['field'] + "_acceptance" ].apply( self.getIcon ) # 
das feld in fullCheck merken fullCheck[ ci['field'] + "_acceptance" ] = ci['field'] + "_acceptance" #print( "# check_acceptance_ext-qdf", qdf ) # in der liste der dataframes anfügen dfs.append( qdf ) # alle teile des dataframes zusammenfassen df = pd.concat( dfs ) # print( "check_acceptance_ext df", df) # minimun des fullcheck ermitteln minAll = df[ fullCheck ].min(axis=None, skipna=True) acceptance = minAll.min() return df, acceptance
[ "def", "check_acceptance_ext", "(", "self", ",", "df", ",", "md", "=", "{", "}", ",", "check", ":", "list", "=", "[", "]", ",", "withSoll", "=", "False", ")", ":", "# Felder die für einen gesammelten check verwendet werden", "fullCheck", "=", "{", "}", "# dataframes und sammlung von dataframes bei query angabe", "dfs", "=", "[", "]", "for", "ci", "in", "check", ":", "# bei query Angabe nur passende verwenden", "if", "\"query\"", "in", "ci", ":", "query", "=", "ci", "[", "'query'", "]", "qdf", "=", "df", ".", "query", "(", "query", ")", "qdf", "[", "\"_query\"", "]", "=", "query", "else", ":", "qdf", "=", "df", "if", "not", "\"field\"", "in", "ci", ":", "# in der liste der dataframes anfügen", "dfs", ".", "append", "(", "qdf", ")", "continue", "tolerance", "=", "\"default\"", "if", "\"tolerance\"", "in", "ci", ":", "tolerance", "=", "ci", "[", "'tolerance'", "]", "if", "withSoll", ":", "try", ":", "sollValue", "=", "md", ".", "current", ".", "tolerance", ".", "get", "(", "tolerance", ")", ".", "get", "(", "\"soll\"", ")", ".", "get", "(", "\"value\"", ",", "np", ".", "nan", ")", "except", ":", "sollValue", "=", "np", ".", "nan", "if", "sollValue", ":", "df", "[", "ci", "[", "'field'", "]", "+", "\"_soll\"", "]", "=", "sollValue", "# ein zusätzliches Feld <field>_acceptance anlegen und füllen", "# apply calls check_tolerance with args on each row (axis=1)", "qdf", "[", "ci", "[", "'field'", "]", "+", "\"_acceptance\"", "]", "=", "np", ".", "nan", "qdf", "=", "qdf", ".", "apply", "(", "lambda", "r", ":", "self", ".", "check_tolerance_ext", "(", "r", ",", "ci", ",", "md", ".", "current", ".", "tolerance", ",", "tolerance", ")", ",", "axis", "=", "1", ",", "result_type", "=", "'expand'", ")", "# und ein zusätzliches feld <field>_passed anlegen und füllen", "qdf", "[", "ci", "[", "'field'", "]", "+", "\"_passed\"", "]", "=", "qdf", "[", "ci", "[", "'field'", "]", "+", "\"_acceptance\"", "]", ".", "apply", "(", "self", ".", "getIcon", ")", "# das feld in fullCheck 
merken", "fullCheck", "[", "ci", "[", "'field'", "]", "+", "\"_acceptance\"", "]", "=", "ci", "[", "'field'", "]", "+", "\"_acceptance\"", "#print( \"# check_acceptance_ext-qdf\", qdf )", "# in der liste der dataframes anfügen", "dfs", ".", "append", "(", "qdf", ")", "# alle teile des dataframes zusammenfassen", "df", "=", "pd", ".", "concat", "(", "dfs", ")", "# print( \"check_acceptance_ext df\", df)", "# minimun des fullcheck ermitteln", "minAll", "=", "df", "[", "fullCheck", "]", ".", "min", "(", "axis", "=", "None", ",", "skipna", "=", "True", ")", "acceptance", "=", "minAll", ".", "min", "(", ")", "return", "df", ",", "acceptance" ]
[ 469, 4 ]
[ 566, 29 ]
null
python
de
['de', 'de', 'de']
True
true
null
RoleReactClient.on_raw_reaction_remove
(self, payload)
Dir wurde die Rolle erfolgreich weggenommen...
Dir wurde die Rolle erfolgreich weggenommen...
async def on_raw_reaction_remove(self, payload): """Dir wurde die Rolle erfolgreich weggenommen...""" # Make sure that the message the user is reacting to is the one we care about if payload.message_id != self.role_message_id: return try: role_id = self.emoji_to_role[payload.emoji] except KeyError: # If the emoji isn't the one we care about then exit as well. return guild = self.get_guild(payload.guild_id) if guild is None: # Check if we're still in the guild and it's cached. return role = guild.get_role(role_id) if role is None: # Make sure the role still exists and is valid. return member = guild.get_member(payload.user_id) if member is None: # Makes sure the member still exists and is valid return try: # Finally, remove the role await member.remove_roles(role) except discord.HTTPException: # If we want to do something in case of errors we'd do it here. pass
[ "async", "def", "on_raw_reaction_remove", "(", "self", ",", "payload", ")", ":", "# Make sure that the message the user is reacting to is the one we care about", "if", "payload", ".", "message_id", "!=", "self", ".", "role_message_id", ":", "return", "try", ":", "role_id", "=", "self", ".", "emoji_to_role", "[", "payload", ".", "emoji", "]", "except", "KeyError", ":", "# If the emoji isn't the one we care about then exit as well.", "return", "guild", "=", "self", ".", "get_guild", "(", "payload", ".", "guild_id", ")", "if", "guild", "is", "None", ":", "# Check if we're still in the guild and it's cached.", "return", "role", "=", "guild", ".", "get_role", "(", "role_id", ")", "if", "role", "is", "None", ":", "# Make sure the role still exists and is valid.", "return", "member", "=", "guild", ".", "get_member", "(", "payload", ".", "user_id", ")", "if", "member", "is", "None", ":", "# Makes sure the member still exists and is valid", "return", "try", ":", "# Finally, remove the role", "await", "member", ".", "remove_roles", "(", "role", ")", "except", "discord", ".", "HTTPException", ":", "# If we want to do something in case of errors we'd do it here.", "pass" ]
[ 49, 4 ]
[ 81, 16 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispSAFRS_decorator
( fn )
return wrapped_fn
Api Aufrufe vorbereiten und durchführen. Im decorator wird wrapped_fn() aufgerufen um die Anfrage zu verarbeiten JSON:API Response formatting follows filter -> sort -> paginate Parameters ---------- fn : safrs/jsonapi.py|SAFRSRestAPI Returns ------- wrapped_fn : func
Api Aufrufe vorbereiten und durchführen.
def ispSAFRS_decorator( fn ): """Api Aufrufe vorbereiten und durchführen. Im decorator wird wrapped_fn() aufgerufen um die Anfrage zu verarbeiten JSON:API Response formatting follows filter -> sort -> paginate Parameters ---------- fn : safrs/jsonapi.py|SAFRSRestAPI Returns ------- wrapped_fn : func """ @wraps(fn) def wrapped_fn(*args, **kwargs): """Funktion zum verarbeiten eine API Anfrage. Arten von fn - safrs/jsonapi.py GET /api/<modul> - SAFRSRestAPI.get GET /api/<modul>/11 - SAFRSRestAPI.get GET /api/<modul>/groupby/? - SAFRSJSONRPCAPI GET /api/<modul>/items_geraet/? - SAFRSJSONRPCAPI SAFRSRestAPI - results - __qualname__ .get - jsonify(result) .post - response .patch - response .delete - {}, HTTPStatus.NO_CONTENT SAFRSJSONRPCAPI - safrs/jsonapi.py - __qualname__ .get - jsonify(result) .post - response SAFRSRestRelationshipAPI - safrs/jsonapi.py - __qualname__ .get - jsonify(result) .patch - response .post - {}, HTTPStatus.NO_CONTENT - we can return result too but it's not necessary per the spec .delete - {}, HTTPStatus.NO_CONTENT SAFRSRestAPI:: das Vorhandensein einer _s_object_id unterscheidet zwischen dem holen eines Datensatzes oder einer Liste id = kwargs.get( fn.SAFRSObject._s_object_id , None) Parameters ---------- *args : tuple mit einem object .. code:: /api/gqadb/?zahl=12 - ( safrs._api.gqadb_API, ) - {} /api/gqadb/2020?zahl=12 - ( safrs._api.gqadb_API, ) - {'gqadbId': '2020'} /api/gqadb/test?zahl=12 - ( safrs._api.method_gqadb_test, ) - {} /api/gqa?zahl=12 - ( safrs._api.gqa_API, ) - {} /api/gqa/2020?zahl=12 - ( safrs._api.gqa_API, ) - {'gqaId': '2020'} /api/gqa/test?zahl=12 - ( safrs._api.gqa_API, ) - {'gqaId': 'test'} **kwargs : dict beliebige Parameter. Returns ------- result : result ResultObject. 
Tests:: log - q, request.endpoint, safrs_obj._s_object_id, json.dumps(kwargs) /api/gqadb - ['SAFRSRestAPI', 'get'] - api.gqadb - gqadbId - {} /api/gqadb/2020 - ['SAFRSRestAPI', 'get'] - api.gqadbId - gqadbId - {"gqadbId": "2020"} /api/gqadb/test - ['SAFRSJSONRPCAPI', 'get'] - api.gqadb.test - gqadbId - {} /api/gqa - ['SAFRSRestAPI', 'get'] - api.gqa - gqaId - {} /api/gqa/2020 - ['SAFRSRestAPI', 'get'] - api.gqaId - gqaId - {"gqaId": "2020"} /api/gqa/test - ['SAFRSRestAPI', 'get'] - api.gqaId - gqaId - {"gqaId": "test"} bei /api/gqadb/test fn.__name__ - get fn.__module__ - safrs.jsonapi request.endpoint - api.gqadb.test safrs_obj.__module__ - app.db safrs_obj.__name__ - gqadb safrs_obj._api.prefix - /api """ # das verwendete Object bestimmen um SAFRSRestRelationshipAPI zu erkennen q = fn.__qualname__.split(".") # die Aufruf Methode: get,post,patch,delete method = q[1] # mit SAFRSRestRelationshipAPI dessen target verwenden if q[0] == "SAFRSRestRelationshipAPI": # eine SAFRSRestRelationshipAPI hat keine _int_init funktion result = fn(*args, **kwargs) # zum weiterarbeiten das _target Object verwenden safrs_obj = fn.SAFRSObject._target else: safrs_obj = fn.SAFRSObject # # Das SAFRSObject vor jedem Aufruf Vorbereiten # safrs_obj._int_init( ) # func_name wird für den _int_call aufruf benötigt # - da ein Aufruf von fn() request parameter # und nicht kwargs an die Aufzurufende Funktion weitergibt # wird bei Angabe von func_name die Funktion mit kwargs uber _int_call aufgerufen func_name = None swagger_path = "" #print("wrapped_fn", q ) # nur bei get parameter prüfen if method == "get": # Merker für Variante b: objectId wird später wieder eingefügt objectId = None # Argumente parsen doArgParse = False # sonderbehandlung bei request.endpoint /api/gqa/<func> # ist api.gqaId - sollte aber api.gqa.<func> sein # der letzte Teil von request.path ist safrs_obj._s_object_id in kwargs und eine funktion name = safrs_obj.__name__.lower() # nur bei einer eigener Erweiterung ohne 
Datenbank if hasattr( fn.SAFRSObject, "no_flask_admin") and fn.SAFRSObject.no_flask_admin == True: ''' Möglichkeiten: a) /class/ : api_list in class aufrufen b) /class/{objectId}/ : keine Funktion objectId vorhanden also api_get aufrufen c) /class/test : Vorhandene Funktion test in class aufrufen, objectId (test) aus den Parametern entfernen ''' doArgParse = True variante = "" # swagger_path zuerst nur der Modulname swagger_path = "/{}".format(name) #print( "wrapped_fn", swagger_path, safrs_obj._s_object_id, kwargs ) func = None # b, c) gibt es eine passende jsonapi_rpc methode if safrs_obj._s_object_id in kwargs: #log.warning("safrs_obj.object_id in kwargs") # auf möglichkeit c) testen try: func = getattr(safrs_obj, kwargs[ safrs_obj._s_object_id ], None) variante = "c" except: # pragma: no cover # getattr gibt einen Fehler bei query log.warning("keine func: getattr gibt einen Fehler bei query") pass # also b) verwenden if not func: try: func = getattr(safrs_obj, 'api_get', None) if func: variante = "b" except: # pragma: no cover # getattr gibt einen Fehler bei query log.warning("no func: getattr gibt einen Fehler bei query") pass else: # ohne object_id Möglichkeit a) try: func = getattr(safrs_obj, "api_list", None) if func: variante = "a" except: # pragma: no cover # getattr gibt einen Fehler bei query log.warning("no func: getattr gibt einen Fehler bei query") pass #log.warning("{} and __rest_doc variante: {}".format(func, variante) ) # wurde eine Funktion gefunden und hat sie ein __rest_doc dann auswerten if func and hasattr(func, '__rest_doc'): func_name = func.__name__ if variante == "a": swagger_path = "/{}/".format( name ) elif variante == "b": # objectId merken objectId = kwargs[ safrs_obj._s_object_id ] swagger_path = "/{}/{}/".format( name, "{" + safrs_obj._s_object_id + "}" ) elif variante == "c": swagger_path = "/{}/{}".format(name, func_name ) else: # es gibt keine passende Funktion also Fehler anzeigen status_code = 400 message = "Funktion nicht 
gefunden" safrs_obj.appError( "{}".format( message ), str( status_code ) ) result = jsonify( {} ) result.status_code = status_code return result elif q[0] == "SAFRSJSONRPCAPI": # dieser Bereich wird in db bei groupby, undefined oder funktionen aufgerufen doArgParse = True # den request endpoint bestimmen - wird benötigt um Swagger parameter zu prüfen # der erste teil ist immer api der letzte Teil die aufzurufende Funktion ep_list = request.endpoint.split(".") func_name = ep_list[-1] # bei diesem swagger_path = "/{}/{}".format(name, ep_list[-1]) else: # einfach durchlaufen ohne die Argumente zu prüfen # SAFRSRestRelationshipAPI - get - dbtestsrelId - {"dbtestsId": "2"} doArgParse = False # nur in swagger abgelegte paramter verwenden und ggf umwandeln # in safrs methoden selbst wird args = dict(request.args) verwendet # _int_parse_args entfernt _s_object_id if doArgParse: kwargs = safrs_obj._int_parse_args( kwargs, method, swagger_path ) # gemerkte objectId wieder einfügen if objectId: kwargs[ safrs_obj._s_object_id ] = objectId elif method == "post": pass request.groups = {} # Parse the jsonapi groups and groups[] args for arg, val in request.args.items(): # https://jsonapi.org/format/#fetching-sparse-fieldsets groups_attr = re.search(r"groups\[(\w+)\]", arg) if groups_attr: group_type = groups_attr.group(1) request.groups[group_type] = val.split(",") elif arg == "groups": # groups ohne andere tabelle verwendet die aktuelle tabelle request.groups[ safrs_obj.__name__ ] = val.split(",") # funktion in der Klasse ausführen sonst fn selbst if func_name: # gewünschte Funktion in fn.SAFRSObject aufrufen meth = fn.SAFRSObject meth.appInfo( "safrs", "Funktion: {}.{}.{}()".format( meth.__module__, meth.__name__, func_name ) ) if hasattr(meth, func_name): if func_name[:4] == "api_": # api_ Funktionen benötigen die Klasse selbst als ersten parameter result = getattr( meth, func_name )( meth, **kwargs ) else: result = getattr( meth, func_name )( **kwargs ) else: # pragma: no 
cover # kann eigentlich nicht passieren da oberhalb gestetet wird meth.appError( "ispSAFRSDummy", "Fehlende Funktion: {}.{}.{}()".format( meth.__module__, meth.__name__, func_name ) ) result = meth._int_json_response( {} ) # abfangen das result auch etwas anderes als ein dict sein kann (z.b. html, pdf ...) if not type( result ) in [dict, list, SAFRSFormattedResponse]: return result try: result = jsonify( result ) except Exception as exc: # pragma: no cover status_code = getattr(exc, "status_code", 500) message = getattr(exc, "message", "unbekannter Fehler") safrs_obj.appError( "{} - {}".format( func_name, message ), str( status_code ) ) result = jsonify( {} ) else: # # die ursprüngliche Funktion aufrufen # # print(args) # log.error("wrapped_fn - post:" ) # print("wrapped_fn", q, args, kwargs, fn ) status_code = 200 try: result = fn(*args, **kwargs) except (ValidationError, GenericError, NotFoundError) as exc: # pragma: no cover status_code = getattr(exc, "status_code", 500) message = getattr(exc, "message", "") except werkzeug.exceptions.NotFound: status_code = 404 message = "Not Found" except Exception as exc: # pragma: no cover status_code = getattr(exc, "status_code", 500) message = getattr(exc, "message", "unbekannter Fehler") # gab es einen Fehler dann in appError setzen if not status_code == 200: safrs_obj.appError( "{} - {}".format(method, message ), str( status_code ) ) result = jsonify( {} ) result.status_code = status_code #---------------------------------------------------------------------- # Auswertung der Ergebnisse # # result holen und zusätzliche informationen einfügen _data = { } _data = result.get_json() # _data muss immer ein dict sein (dict, list, SAFRSFormattedResponse) if not type( _data ) == dict: # ist _data list dann als data verwenden, sonst in _wrongdatatype einfügen if type( _data ) == list: _data = {"data": _data} # data bereich in data muss immer list sein if not 'data' in _data or _data['data'] is None: _data['data'] = [] if not 
'meta' in _data: # ohne meta mind. count mitgeben _data['meta'] = { "count": len( _data.get("data", [] ) ) } if not 'count' in _data['meta'] or _data['meta']['count'] is None: _data['meta']['count'] = 0 # offset für die Bestimmung des letzten im Grid try: _data['meta']["offset"] = int( get_request_param("page_offset") ) except ValueError: # pragma: no cover _data['meta']["offset"] = 0 #raise ValidationError("Pagination Value Error") # die Angaben aus _resultUpdate (App-Error, App-Info,...) des Datenbankobjects hinzufügen # _data.update( safrs_obj._resultUpdate ) try: result.set_data( json.dumps( _data ) ) except : # pragma: no cover result.status_code = 500 log.error("wrapped_fun data error") # http statuscode auswerten if "status_code" in result.json: result.status_code = result.json["status_code"] return result return wrapped_fn
[ "def", "ispSAFRS_decorator", "(", "fn", ")", ":", "@", "wraps", "(", "fn", ")", "def", "wrapped_fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Funktion zum verarbeiten eine API Anfrage.\n\n Arten von fn - safrs/jsonapi.py\n\n GET /api/<modul> - SAFRSRestAPI.get\n GET /api/<modul>/11 - SAFRSRestAPI.get\n GET /api/<modul>/groupby/? - SAFRSJSONRPCAPI\n GET /api/<modul>/items_geraet/? - SAFRSJSONRPCAPI\n\n SAFRSRestAPI - results - __qualname__\n .get - jsonify(result)\n .post - response\n .patch - response\n .delete - {}, HTTPStatus.NO_CONTENT\n\n SAFRSJSONRPCAPI - safrs/jsonapi.py - __qualname__\n .get - jsonify(result)\n .post - response\n\n SAFRSRestRelationshipAPI - safrs/jsonapi.py - __qualname__\n .get - jsonify(result)\n .patch - response\n .post - {}, HTTPStatus.NO_CONTENT - we can return result too but it's not necessary per the spec\n .delete - {}, HTTPStatus.NO_CONTENT\n\n SAFRSRestAPI::\n\n das Vorhandensein einer _s_object_id unterscheidet zwischen dem holen eines Datensatzes oder einer Liste\n id = kwargs.get( fn.SAFRSObject._s_object_id , None)\n\n Parameters\n ----------\n *args : tuple\n mit einem object\n .. 
code::\n\n /api/gqadb/?zahl=12 - ( safrs._api.gqadb_API, ) - {}\n /api/gqadb/2020?zahl=12 - ( safrs._api.gqadb_API, ) - {'gqadbId': '2020'}\n /api/gqadb/test?zahl=12 - ( safrs._api.method_gqadb_test, ) - {}\n /api/gqa?zahl=12 - ( safrs._api.gqa_API, ) - {}\n /api/gqa/2020?zahl=12 - ( safrs._api.gqa_API, ) - {'gqaId': '2020'}\n /api/gqa/test?zahl=12 - ( safrs._api.gqa_API, ) - {'gqaId': 'test'}\n\n **kwargs : dict\n beliebige Parameter.\n\n Returns\n -------\n result : result\n ResultObject.\n\n Tests::\n\n log - q, request.endpoint, safrs_obj._s_object_id, json.dumps(kwargs)\n /api/gqadb - ['SAFRSRestAPI', 'get'] - api.gqadb - gqadbId - {}\n /api/gqadb/2020 - ['SAFRSRestAPI', 'get'] - api.gqadbId - gqadbId - {\"gqadbId\": \"2020\"}\n /api/gqadb/test - ['SAFRSJSONRPCAPI', 'get'] - api.gqadb.test - gqadbId - {}\n\n /api/gqa - ['SAFRSRestAPI', 'get'] - api.gqa - gqaId - {}\n /api/gqa/2020 - ['SAFRSRestAPI', 'get'] - api.gqaId - gqaId - {\"gqaId\": \"2020\"}\n /api/gqa/test - ['SAFRSRestAPI', 'get'] - api.gqaId - gqaId - {\"gqaId\": \"test\"}\n\n bei /api/gqadb/test\n\n fn.__name__ - get\n fn.__module__ - safrs.jsonapi\n request.endpoint - api.gqadb.test\n safrs_obj.__module__ - app.db\n safrs_obj.__name__ - gqadb\n safrs_obj._api.prefix - /api\n \"\"\"", "# das verwendete Object bestimmen um SAFRSRestRelationshipAPI zu erkennen", "q", "=", "fn", ".", "__qualname__", ".", "split", "(", "\".\"", ")", "# die Aufruf Methode: get,post,patch,delete", "method", "=", "q", "[", "1", "]", "# mit SAFRSRestRelationshipAPI dessen target verwenden", "if", "q", "[", "0", "]", "==", "\"SAFRSRestRelationshipAPI\"", ":", "# eine SAFRSRestRelationshipAPI hat keine _int_init funktion", "result", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# zum weiterarbeiten das _target Object verwenden", "safrs_obj", "=", "fn", ".", "SAFRSObject", ".", "_target", "else", ":", "safrs_obj", "=", "fn", ".", "SAFRSObject", "#", "# Das SAFRSObject vor jedem Aufruf Vorbereiten", "#", 
"safrs_obj", ".", "_int_init", "(", ")", "# func_name wird für den _int_call aufruf benötigt", "# - da ein Aufruf von fn() request parameter", "# und nicht kwargs an die Aufzurufende Funktion weitergibt", "# wird bei Angabe von func_name die Funktion mit kwargs uber _int_call aufgerufen", "func_name", "=", "None", "swagger_path", "=", "\"\"", "#print(\"wrapped_fn\", q )", "# nur bei get parameter prüfen", "if", "method", "==", "\"get\"", ":", "# Merker für Variante b: objectId wird später wieder eingefügt", "objectId", "=", "None", "# Argumente parsen", "doArgParse", "=", "False", "# sonderbehandlung bei request.endpoint /api/gqa/<func>", "# ist api.gqaId - sollte aber api.gqa.<func> sein", "# der letzte Teil von request.path ist safrs_obj._s_object_id in kwargs und eine funktion", "name", "=", "safrs_obj", ".", "__name__", ".", "lower", "(", ")", "# nur bei einer eigener Erweiterung ohne Datenbank", "if", "hasattr", "(", "fn", ".", "SAFRSObject", ",", "\"no_flask_admin\"", ")", "and", "fn", ".", "SAFRSObject", ".", "no_flask_admin", "==", "True", ":", "'''\n Möglichkeiten:\n a) /class/ : api_list in class aufrufen\n b) /class/{objectId}/ : keine Funktion objectId vorhanden also api_get aufrufen\n c) /class/test : Vorhandene Funktion test in class aufrufen, objectId (test) aus den Parametern entfernen\n\n '''", "doArgParse", "=", "True", "variante", "=", "\"\"", "# swagger_path zuerst nur der Modulname", "swagger_path", "=", "\"/{}\"", ".", "format", "(", "name", ")", "#print( \"wrapped_fn\", swagger_path, safrs_obj._s_object_id, kwargs )", "func", "=", "None", "# b, c) gibt es eine passende jsonapi_rpc methode", "if", "safrs_obj", ".", "_s_object_id", "in", "kwargs", ":", "#log.warning(\"safrs_obj.object_id in kwargs\")", "# auf möglichkeit c) testen", "try", ":", "func", "=", "getattr", "(", "safrs_obj", ",", "kwargs", "[", "safrs_obj", ".", "_s_object_id", "]", ",", "None", ")", "variante", "=", "\"c\"", "except", ":", "# pragma: no cover", "# getattr gibt einen 
Fehler bei query", "log", ".", "warning", "(", "\"keine func: getattr gibt einen Fehler bei query\"", ")", "pass", "# also b) verwenden", "if", "not", "func", ":", "try", ":", "func", "=", "getattr", "(", "safrs_obj", ",", "'api_get'", ",", "None", ")", "if", "func", ":", "variante", "=", "\"b\"", "except", ":", "# pragma: no cover", "# getattr gibt einen Fehler bei query", "log", ".", "warning", "(", "\"no func: getattr gibt einen Fehler bei query\"", ")", "pass", "else", ":", "# ohne object_id Möglichkeit a)", "try", ":", "func", "=", "getattr", "(", "safrs_obj", ",", "\"api_list\"", ",", "None", ")", "if", "func", ":", "variante", "=", "\"a\"", "except", ":", "# pragma: no cover", "# getattr gibt einen Fehler bei query", "log", ".", "warning", "(", "\"no func: getattr gibt einen Fehler bei query\"", ")", "pass", "#log.warning(\"{} and __rest_doc variante: {}\".format(func, variante) )", "# wurde eine Funktion gefunden und hat sie ein __rest_doc dann auswerten", "if", "func", "and", "hasattr", "(", "func", ",", "'__rest_doc'", ")", ":", "func_name", "=", "func", ".", "__name__", "if", "variante", "==", "\"a\"", ":", "swagger_path", "=", "\"/{}/\"", ".", "format", "(", "name", ")", "elif", "variante", "==", "\"b\"", ":", "# objectId merken", "objectId", "=", "kwargs", "[", "safrs_obj", ".", "_s_object_id", "]", "swagger_path", "=", "\"/{}/{}/\"", ".", "format", "(", "name", ",", "\"{\"", "+", "safrs_obj", ".", "_s_object_id", "+", "\"}\"", ")", "elif", "variante", "==", "\"c\"", ":", "swagger_path", "=", "\"/{}/{}\"", ".", "format", "(", "name", ",", "func_name", ")", "else", ":", "# es gibt keine passende Funktion also Fehler anzeigen", "status_code", "=", "400", "message", "=", "\"Funktion nicht gefunden\"", "safrs_obj", ".", "appError", "(", "\"{}\"", ".", "format", "(", "message", ")", ",", "str", "(", "status_code", ")", ")", "result", "=", "jsonify", "(", "{", "}", ")", "result", ".", "status_code", "=", "status_code", "return", "result", "elif", "q", "[", 
"0", "]", "==", "\"SAFRSJSONRPCAPI\"", ":", "# dieser Bereich wird in db bei groupby, undefined oder funktionen aufgerufen", "doArgParse", "=", "True", "# den request endpoint bestimmen - wird benötigt um Swagger parameter zu prüfen", "# der erste teil ist immer api der letzte Teil die aufzurufende Funktion", "ep_list", "=", "request", ".", "endpoint", ".", "split", "(", "\".\"", ")", "func_name", "=", "ep_list", "[", "-", "1", "]", "# bei diesem", "swagger_path", "=", "\"/{}/{}\"", ".", "format", "(", "name", ",", "ep_list", "[", "-", "1", "]", ")", "else", ":", "# einfach durchlaufen ohne die Argumente zu prüfen", "# SAFRSRestRelationshipAPI - get - dbtestsrelId - {\"dbtestsId\": \"2\"}", "doArgParse", "=", "False", "# nur in swagger abgelegte paramter verwenden und ggf umwandeln", "# in safrs methoden selbst wird args = dict(request.args) verwendet", "# _int_parse_args entfernt _s_object_id", "if", "doArgParse", ":", "kwargs", "=", "safrs_obj", ".", "_int_parse_args", "(", "kwargs", ",", "method", ",", "swagger_path", ")", "# gemerkte objectId wieder einfügen", "if", "objectId", ":", "kwargs", "[", "safrs_obj", ".", "_s_object_id", "]", "=", "objectId", "elif", "method", "==", "\"post\"", ":", "pass", "request", ".", "groups", "=", "{", "}", "# Parse the jsonapi groups and groups[] args", "for", "arg", ",", "val", "in", "request", ".", "args", ".", "items", "(", ")", ":", "# https://jsonapi.org/format/#fetching-sparse-fieldsets", "groups_attr", "=", "re", ".", "search", "(", "r\"groups\\[(\\w+)\\]\"", ",", "arg", ")", "if", "groups_attr", ":", "group_type", "=", "groups_attr", ".", "group", "(", "1", ")", "request", ".", "groups", "[", "group_type", "]", "=", "val", ".", "split", "(", "\",\"", ")", "elif", "arg", "==", "\"groups\"", ":", "# groups ohne andere tabelle verwendet die aktuelle tabelle", "request", ".", "groups", "[", "safrs_obj", ".", "__name__", "]", "=", "val", ".", "split", "(", "\",\"", ")", "# funktion in der Klasse ausführen sonst fn selbst", 
"if", "func_name", ":", "# gewünschte Funktion in fn.SAFRSObject aufrufen", "meth", "=", "fn", ".", "SAFRSObject", "meth", ".", "appInfo", "(", "\"safrs\"", ",", "\"Funktion: {}.{}.{}()\"", ".", "format", "(", "meth", ".", "__module__", ",", "meth", ".", "__name__", ",", "func_name", ")", ")", "if", "hasattr", "(", "meth", ",", "func_name", ")", ":", "if", "func_name", "[", ":", "4", "]", "==", "\"api_\"", ":", "# api_ Funktionen benötigen die Klasse selbst als ersten parameter", "result", "=", "getattr", "(", "meth", ",", "func_name", ")", "(", "meth", ",", "*", "*", "kwargs", ")", "else", ":", "result", "=", "getattr", "(", "meth", ",", "func_name", ")", "(", "*", "*", "kwargs", ")", "else", ":", "# pragma: no cover", "# kann eigentlich nicht passieren da oberhalb gestetet wird", "meth", ".", "appError", "(", "\"ispSAFRSDummy\"", ",", "\"Fehlende Funktion: {}.{}.{}()\"", ".", "format", "(", "meth", ".", "__module__", ",", "meth", ".", "__name__", ",", "func_name", ")", ")", "result", "=", "meth", ".", "_int_json_response", "(", "{", "}", ")", "# abfangen das result auch etwas anderes als ein dict sein kann (z.b. 
html, pdf ...)", "if", "not", "type", "(", "result", ")", "in", "[", "dict", ",", "list", ",", "SAFRSFormattedResponse", "]", ":", "return", "result", "try", ":", "result", "=", "jsonify", "(", "result", ")", "except", "Exception", "as", "exc", ":", "# pragma: no cover", "status_code", "=", "getattr", "(", "exc", ",", "\"status_code\"", ",", "500", ")", "message", "=", "getattr", "(", "exc", ",", "\"message\"", ",", "\"unbekannter Fehler\"", ")", "safrs_obj", ".", "appError", "(", "\"{} - {}\"", ".", "format", "(", "func_name", ",", "message", ")", ",", "str", "(", "status_code", ")", ")", "result", "=", "jsonify", "(", "{", "}", ")", "else", ":", "#", "# die ursprüngliche Funktion aufrufen", "#", "# print(args)", "# log.error(\"wrapped_fn - post:\" )", "# print(\"wrapped_fn\", q, args, kwargs, fn )", "status_code", "=", "200", "try", ":", "result", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "(", "ValidationError", ",", "GenericError", ",", "NotFoundError", ")", "as", "exc", ":", "# pragma: no cover", "status_code", "=", "getattr", "(", "exc", ",", "\"status_code\"", ",", "500", ")", "message", "=", "getattr", "(", "exc", ",", "\"message\"", ",", "\"\"", ")", "except", "werkzeug", ".", "exceptions", ".", "NotFound", ":", "status_code", "=", "404", "message", "=", "\"Not Found\"", "except", "Exception", "as", "exc", ":", "# pragma: no cover", "status_code", "=", "getattr", "(", "exc", ",", "\"status_code\"", ",", "500", ")", "message", "=", "getattr", "(", "exc", ",", "\"message\"", ",", "\"unbekannter Fehler\"", ")", "# gab es einen Fehler dann in appError setzen", "if", "not", "status_code", "==", "200", ":", "safrs_obj", ".", "appError", "(", "\"{} - {}\"", ".", "format", "(", "method", ",", "message", ")", ",", "str", "(", "status_code", ")", ")", "result", "=", "jsonify", "(", "{", "}", ")", "result", ".", "status_code", "=", "status_code", "#----------------------------------------------------------------------", "# Auswertung der 
Ergebnisse", "#", "# result holen und zusätzliche informationen einfügen", "_data", "=", "{", "}", "_data", "=", "result", ".", "get_json", "(", ")", "# _data muss immer ein dict sein (dict, list, SAFRSFormattedResponse)", "if", "not", "type", "(", "_data", ")", "==", "dict", ":", "# ist _data list dann als data verwenden, sonst in _wrongdatatype einfügen", "if", "type", "(", "_data", ")", "==", "list", ":", "_data", "=", "{", "\"data\"", ":", "_data", "}", "# data bereich in data muss immer list sein", "if", "not", "'data'", "in", "_data", "or", "_data", "[", "'data'", "]", "is", "None", ":", "_data", "[", "'data'", "]", "=", "[", "]", "if", "not", "'meta'", "in", "_data", ":", "# ohne meta mind. count mitgeben", "_data", "[", "'meta'", "]", "=", "{", "\"count\"", ":", "len", "(", "_data", ".", "get", "(", "\"data\"", ",", "[", "]", ")", ")", "}", "if", "not", "'count'", "in", "_data", "[", "'meta'", "]", "or", "_data", "[", "'meta'", "]", "[", "'count'", "]", "is", "None", ":", "_data", "[", "'meta'", "]", "[", "'count'", "]", "=", "0", "# offset für die Bestimmung des letzten im Grid", "try", ":", "_data", "[", "'meta'", "]", "[", "\"offset\"", "]", "=", "int", "(", "get_request_param", "(", "\"page_offset\"", ")", ")", "except", "ValueError", ":", "# pragma: no cover", "_data", "[", "'meta'", "]", "[", "\"offset\"", "]", "=", "0", "#raise ValidationError(\"Pagination Value Error\")", "# die Angaben aus _resultUpdate (App-Error, App-Info,...) 
des Datenbankobjects hinzufügen", "#", "_data", ".", "update", "(", "safrs_obj", ".", "_resultUpdate", ")", "try", ":", "result", ".", "set_data", "(", "json", ".", "dumps", "(", "_data", ")", ")", "except", ":", "# pragma: no cover", "result", ".", "status_code", "=", "500", "log", ".", "error", "(", "\"wrapped_fun data error\"", ")", "# http statuscode auswerten", "if", "\"status_code\"", "in", "result", ".", "json", ":", "result", ".", "status_code", "=", "result", ".", "json", "[", "\"status_code\"", "]", "return", "result", "return", "wrapped_fn" ]
[ 260, 0 ]
[ 635, 21 ]
null
python
de
['de', 'de', 'de']
True
true
null
DicomImage.initMemoryDicom
(self, data)
return True
Lädt Dicom daten die schon im Speicher sind Parameters ---------- data : dict - dicom : pydicom.FileDataset <class 'pydicom.dataset.FileDataset'> - dicom : pydicom.Dataset <class 'pydicom.dataset.Dataset'> Dicomdaten Dataset oder FileDataset - info: dict Info der Dicomdaten
Lädt Dicom daten die schon im Speicher sind Parameters ---------- data : dict - dicom : pydicom.FileDataset <class 'pydicom.dataset.FileDataset'> - dicom : pydicom.Dataset <class 'pydicom.dataset.Dataset'> Dicomdaten Dataset oder FileDataset - info: dict Info der Dicomdaten
def initMemoryDicom(self, data): """ Lädt Dicom daten die schon im Speicher sind Parameters ---------- data : dict - dicom : pydicom.FileDataset <class 'pydicom.dataset.FileDataset'> - dicom : pydicom.Dataset <class 'pydicom.dataset.Dataset'> Dicomdaten Dataset oder FileDataset - info: dict Info der Dicomdaten """ if "info" in data: self.infos = data["info"] else: self.infos = data # definition der Übergabe variablen dtype=None self._sid = None self._dpi = None # auf Dataset oder FileDataset <class 'pydicom.dataset.FileDataset'> abfragen if not data["dicom"].__class__.__name__ in [ "Dataset", "FileDataset"]: return False self.metadata = data["dicom"] # pydicom.FileDataset <class 'pydicom.dataset.FileDataset'> # dtype=uint16; SOPClassUID=RT Image Storage self._original_dtype = self.metadata.pixel_array.dtype if dtype is not None: self.array = self.metadata.pixel_array.astype(dtype) else: self.array = self.metadata.pixel_array # convert values to proper HU: real_values = slope * raw + intercept if self.metadata.SOPClassUID.name == 'CT Image Storage': self.array = int(self.metadata.RescaleSlope)*self.array + int(self.metadata.RescaleIntercept) return True
[ "def", "initMemoryDicom", "(", "self", ",", "data", ")", ":", "if", "\"info\"", "in", "data", ":", "self", ".", "infos", "=", "data", "[", "\"info\"", "]", "else", ":", "self", ".", "infos", "=", "data", "# definition der Übergabe variablen", "dtype", "=", "None", "self", ".", "_sid", "=", "None", "self", ".", "_dpi", "=", "None", "# auf Dataset oder FileDataset <class 'pydicom.dataset.FileDataset'> abfragen", "if", "not", "data", "[", "\"dicom\"", "]", ".", "__class__", ".", "__name__", "in", "[", "\"Dataset\"", ",", "\"FileDataset\"", "]", ":", "return", "False", "self", ".", "metadata", "=", "data", "[", "\"dicom\"", "]", "# pydicom.FileDataset <class 'pydicom.dataset.FileDataset'>", "# dtype=uint16; SOPClassUID=RT Image Storage", "self", ".", "_original_dtype", "=", "self", ".", "metadata", ".", "pixel_array", ".", "dtype", "if", "dtype", "is", "not", "None", ":", "self", ".", "array", "=", "self", ".", "metadata", ".", "pixel_array", ".", "astype", "(", "dtype", ")", "else", ":", "self", ".", "array", "=", "self", ".", "metadata", ".", "pixel_array", "# convert values to proper HU: real_values = slope * raw + intercept", "if", "self", ".", "metadata", ".", "SOPClassUID", ".", "name", "==", "'CT Image Storage'", ":", "self", ".", "array", "=", "int", "(", "self", ".", "metadata", ".", "RescaleSlope", ")", "*", "self", ".", "array", "+", "int", "(", "self", ".", "metadata", ".", "RescaleIntercept", ")", "return", "True" ]
[ 311, 4 ]
[ 351, 19 ]
null
python
de
['de', 'de', 'de']
True
true
null
get_temp_resource_path
(path: str, pipeline_id: str)
return get_resource_path(os.path.join(TEMP_LOCATION, pipeline_id, path))
Erstellt einen absoluten Pfad zu der übergebene Ressource im Temp-Ordner. :param path: Pfad zur Ressource, relativ zum `resources/temp`-Ordner. :param pipeline_id: id der Pipeline von der die Funktion aufgerufen wurde. :type pipeline_id: str
Erstellt einen absoluten Pfad zu der übergebene Ressource im Temp-Ordner.
def get_temp_resource_path(path: str, pipeline_id: str): """Erstellt einen absoluten Pfad zu der übergebene Ressource im Temp-Ordner. :param path: Pfad zur Ressource, relativ zum `resources/temp`-Ordner. :param pipeline_id: id der Pipeline von der die Funktion aufgerufen wurde. :type pipeline_id: str """ return get_resource_path(os.path.join(TEMP_LOCATION, pipeline_id, path))
[ "def", "get_temp_resource_path", "(", "path", ":", "str", ",", "pipeline_id", ":", "str", ")", ":", "return", "get_resource_path", "(", "os", ".", "path", ".", "join", "(", "TEMP_LOCATION", ",", "pipeline_id", ",", "path", ")", ")" ]
[ 142, 0 ]
[ 149, 76 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispBase.checkFields
(self, meta:dict={}, baseField=None, fields=None, fieldLen:int=-1, warn:bool=True )
return result
prüft ob in den Pandas.dataFrames ein baseField und die fieldLen von fields stimmen BaseField muss nicht angegeben werden Parameters ---------- meta: dict metadata des Aufrufs baseField : pandas das basis Feld fields: pandas die ermittelten passenden Felder fieldLen: int die Anzahl der benötigten Felder warn: Eine Warung auf logger ausgeben Returns ------- result: empty dict or dict with - msg: Message string for PDF output - data: list of fields
prüft ob in den Pandas.dataFrames ein baseField und die fieldLen von fields stimmen
def checkFields(self, meta:dict={}, baseField=None, fields=None, fieldLen:int=-1, warn:bool=True ): """prüft ob in den Pandas.dataFrames ein baseField und die fieldLen von fields stimmen BaseField muss nicht angegeben werden Parameters ---------- meta: dict metadata des Aufrufs baseField : pandas das basis Feld fields: pandas die ermittelten passenden Felder fieldLen: int die Anzahl der benötigten Felder warn: Eine Warung auf logger ausgeben Returns ------- result: empty dict or dict with - msg: Message string for PDF output - data: list of fields """ err_columns = ["CourseId", "PlanSetupId", "RadiationId", "ImageId", "acquisition", "gantry", "SliceUID"] # err_columns = ["FeldArt", "CourseId", "PlanSetupId", "RadiationId", "ImageId", "acquisition", "SliceUID"] result = {} errors = [] err_fields = pd.DataFrame(columns=err_columns) err_fields['FeldArt'] = '' # base Field if isinstance(baseField, pd.DataFrame) and len(baseField.index) != 1: if len(baseField.index) > 1: errors.append( "- baseField ist nicht 1" ) dfb = baseField[ err_columns ] dfb['FeldArt'] = 'base' err_fields = err_fields.append(dfb) else: errors.append( "- baseField fehlt" ) # fields prüfen if fieldLen > -1 and isinstance(fields, pd.DataFrame) and len(fields.index) != fieldLen: dff = fields[ err_columns ] dff['FeldArt'] = 'field' err_fields = err_fields.append(dff) errors.append( "- Feldzahl ist {} statt {}".format( len(fields.index), fieldLen ) ) #errors.append( err_fields ) if len(err_fields) > 0: meta_str = self.getMetaErrorString( meta ) # self.appError( meta_str, errors ) if warn: logger.error( meta_str + ": " + json.dumps( errors ) ) result = {"msg": "\n\n".join(errors), "data": err_fields} #print( result ) return result
[ "def", "checkFields", "(", "self", ",", "meta", ":", "dict", "=", "{", "}", ",", "baseField", "=", "None", ",", "fields", "=", "None", ",", "fieldLen", ":", "int", "=", "-", "1", ",", "warn", ":", "bool", "=", "True", ")", ":", "err_columns", "=", "[", "\"CourseId\"", ",", "\"PlanSetupId\"", ",", "\"RadiationId\"", ",", "\"ImageId\"", ",", "\"acquisition\"", ",", "\"gantry\"", ",", "\"SliceUID\"", "]", "# err_columns = [\"FeldArt\", \"CourseId\", \"PlanSetupId\", \"RadiationId\", \"ImageId\", \"acquisition\", \"SliceUID\"]", "result", "=", "{", "}", "errors", "=", "[", "]", "err_fields", "=", "pd", ".", "DataFrame", "(", "columns", "=", "err_columns", ")", "err_fields", "[", "'FeldArt'", "]", "=", "''", "# base Field", "if", "isinstance", "(", "baseField", ",", "pd", ".", "DataFrame", ")", "and", "len", "(", "baseField", ".", "index", ")", "!=", "1", ":", "if", "len", "(", "baseField", ".", "index", ")", ">", "1", ":", "errors", ".", "append", "(", "\"- baseField ist nicht 1\"", ")", "dfb", "=", "baseField", "[", "err_columns", "]", "dfb", "[", "'FeldArt'", "]", "=", "'base'", "err_fields", "=", "err_fields", ".", "append", "(", "dfb", ")", "else", ":", "errors", ".", "append", "(", "\"- baseField fehlt\"", ")", "# fields prüfen", "if", "fieldLen", ">", "-", "1", "and", "isinstance", "(", "fields", ",", "pd", ".", "DataFrame", ")", "and", "len", "(", "fields", ".", "index", ")", "!=", "fieldLen", ":", "dff", "=", "fields", "[", "err_columns", "]", "dff", "[", "'FeldArt'", "]", "=", "'field'", "err_fields", "=", "err_fields", ".", "append", "(", "dff", ")", "errors", ".", "append", "(", "\"- Feldzahl ist {} statt {}\"", ".", "format", "(", "len", "(", "fields", ".", "index", ")", ",", "fieldLen", ")", ")", "#errors.append( err_fields )", "if", "len", "(", "err_fields", ")", ">", "0", ":", "meta_str", "=", "self", ".", "getMetaErrorString", "(", "meta", ")", "# self.appError( meta_str, errors )", "if", "warn", ":", "logger", ".", "error", "(", "meta_str", "+", "\": 
\"", "+", "json", ".", "dumps", "(", "errors", ")", ")", "result", "=", "{", "\"msg\"", ":", "\"\\n\\n\"", ".", "join", "(", "errors", ")", ",", "\"data\"", ":", "err_fields", "}", "#print( result )", "return", "result" ]
[ 207, 4 ]
[ 273, 21 ]
null
python
de
['de', 'de', 'de']
True
true
null
_bezierCasteljau
(bezier, t)
return punkte[3][0], punkte
Fuer eine Bezierkurve den Punkt fuer t\in[0,1] berechnen (nach Algorithmus von Casteljau)
Fuer eine Bezierkurve den Punkt fuer t\in[0,1] berechnen (nach Algorithmus von Casteljau)
def _bezierCasteljau(bezier, t): '''Fuer eine Bezierkurve den Punkt fuer t\in[0,1] berechnen (nach Algorithmus von Casteljau)''' punkte = [bezier] for k in [1,2,3]: punkte.append([]) for i in range(0,3-k+1): # punkte[k,i] = ... punkte[k].append(_punktAufGerade(t, punkte[k-1][i], punkte[k-1][i+1])) return punkte[3][0], punkte
[ "def", "_bezierCasteljau", "(", "bezier", ",", "t", ")", ":", "punkte", "=", "[", "bezier", "]", "for", "k", "in", "[", "1", ",", "2", ",", "3", "]", ":", "punkte", ".", "append", "(", "[", "]", ")", "for", "i", "in", "range", "(", "0", ",", "3", "-", "k", "+", "1", ")", ":", "# punkte[k,i] = ...", "punkte", "[", "k", "]", ".", "append", "(", "_punktAufGerade", "(", "t", ",", "punkte", "[", "k", "-", "1", "]", "[", "i", "]", ",", "punkte", "[", "k", "-", "1", "]", "[", "i", "+", "1", "]", ")", ")", "return", "punkte", "[", "3", "]", "[", "0", "]", ",", "punkte" ]
[ 10, 0 ]
[ 18, 31 ]
null
python
de
['de', 'de', 'de']
True
true
null
calculate_sum
(values: dict, data: StepData)
Findet die Summe von Werten, die in einem Array stehen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Findet die Summe von Werten, die in einem Array stehen.
def calculate_sum(values: dict, data: StepData): """Findet die Summe von Werten, die in einem Array stehen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ for idx, key in data.loop_key(values["keys"], values): value = data.get_data(key, values) inner_key = values.get("innerKey", None) if inner_key: value = [reduce(operator.getitem, inner_key[0].split('|'), x) for x in value] new_key = get_new_keys(values, idx) new_value = sum(value) if values.get("decimal", None): new_value = round(new_value, data.get_data(values["decimal"], values, numbers.Number)) else: new_value = round(new_value) data.insert_data(new_key, new_value, values) if values.get("save_idx_to", None): data.insert_data(values["save_idx_to"][idx], value.index(new_value), values)
[ "def", "calculate_sum", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "value", "=", "data", ".", "get_data", "(", "key", ",", "values", ")", "inner_key", "=", "values", ".", "get", "(", "\"innerKey\"", ",", "None", ")", "if", "inner_key", ":", "value", "=", "[", "reduce", "(", "operator", ".", "getitem", ",", "inner_key", "[", "0", "]", ".", "split", "(", "'|'", ")", ",", "x", ")", "for", "x", "in", "value", "]", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "new_value", "=", "sum", "(", "value", ")", "if", "values", ".", "get", "(", "\"decimal\"", ",", "None", ")", ":", "new_value", "=", "round", "(", "new_value", ",", "data", ".", "get_data", "(", "values", "[", "\"decimal\"", "]", ",", "values", ",", "numbers", ".", "Number", ")", ")", "else", ":", "new_value", "=", "round", "(", "new_value", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")", "if", "values", ".", "get", "(", "\"save_idx_to\"", ",", "None", ")", ":", "data", ".", "insert_data", "(", "values", "[", "\"save_idx_to\"", "]", "[", "idx", "]", ",", "value", ".", "index", "(", "new_value", ")", ",", "values", ")" ]
[ 75, 0 ]
[ 95, 88 ]
null
python
de
['de', 'de', 'de']
True
true
null
Zeitrechnung.__repr__
(self)
return f"<Zeitrechnung {self.kuerzel}>"
Der Representations-String der Zeitrechnung :return str
Der Representations-String der Zeitrechnung
def __repr__(self): """Der Representations-String der Zeitrechnung :return str""" return f"<Zeitrechnung {self.kuerzel}>"
[ "def", "__repr__", "(", "self", ")", ":", "return", "f\"<Zeitrechnung {self.kuerzel}>\"" ]
[ 21, 4 ]
[ 25, 47 ]
null
python
de
['de', 'de', 'de']
True
true
null
OpenREADMEtxt
()
return
Oeffnen der README.txt im aktuellen Skriptverzeichnis
Oeffnen der README.txt im aktuellen Skriptverzeichnis
def OpenREADMEtxt(): ''' Oeffnen der README.txt im aktuellen Skriptverzeichnis ''' global myPath location = os.path.join(myPath, "README.md") # README-Datei öffnen os.startfile(location) return
[ "def", "OpenREADMEtxt", "(", ")", ":", "global", "myPath", "location", "=", "os", ".", "path", ".", "join", "(", "myPath", ",", "\"README.md\"", ")", "# README-Datei öffnen\r", "os", ".", "startfile", "(", "location", ")", "return" ]
[ 219, 0 ]
[ 228, 10 ]
null
python
de
['de', 'de', 'de']
True
true
null
storing
(values: dict, data: StepData)
Schreibt die API-Daten nach Ausführung der `transform`-Typen in eine JSON-Datei. Als Dateiname wird der Jobname sowie das heutige Datum verwendet. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Schreibt die API-Daten nach Ausführung der `transform`-Typen in eine JSON-Datei.
def storing(values: dict, data: StepData): """Schreibt die API-Daten nach Ausführung der `transform`-Typen in eine JSON-Datei. Als Dateiname wird der Jobname sowie das heutige Datum verwendet. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ if values.get("storing", None): for value in values["storing"]: new_data = _remove_keys(value, data, data.get_data(value["key"], values)) name = data.format(value["name"]) if value.get("safe_only_on_change", False): try: with resources.open_specific_memory_resource(data.get_config("job_name"), name, False) as fp: old_data = json.loads(fp.read()) if old_data == new_data: continue except (FileNotFoundError, IndexError): pass file_name = resources.new_memory_resource_path(data.get_config("job_name"), name) with open(file_name, 'w') as fp: json.dump(new_data, fp) delete_memory_files(data.get_config("job_name"), value["name"], data.get_data(value.get("count", 20), values, int))
[ "def", "storing", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "if", "values", ".", "get", "(", "\"storing\"", ",", "None", ")", ":", "for", "value", "in", "values", "[", "\"storing\"", "]", ":", "new_data", "=", "_remove_keys", "(", "value", ",", "data", ",", "data", ".", "get_data", "(", "value", "[", "\"key\"", "]", ",", "values", ")", ")", "name", "=", "data", ".", "format", "(", "value", "[", "\"name\"", "]", ")", "if", "value", ".", "get", "(", "\"safe_only_on_change\"", ",", "False", ")", ":", "try", ":", "with", "resources", ".", "open_specific_memory_resource", "(", "data", ".", "get_config", "(", "\"job_name\"", ")", ",", "name", ",", "False", ")", "as", "fp", ":", "old_data", "=", "json", ".", "loads", "(", "fp", ".", "read", "(", ")", ")", "if", "old_data", "==", "new_data", ":", "continue", "except", "(", "FileNotFoundError", ",", "IndexError", ")", ":", "pass", "file_name", "=", "resources", ".", "new_memory_resource_path", "(", "data", ".", "get_config", "(", "\"job_name\"", ")", ",", "name", ")", "with", "open", "(", "file_name", ",", "'w'", ")", "as", "fp", ":", "json", ".", "dump", "(", "new_data", ",", "fp", ")", "delete_memory_files", "(", "data", ".", "get_config", "(", "\"job_name\"", ")", ",", "value", "[", "\"name\"", "]", ",", "data", ".", "get_data", "(", "value", ".", "get", "(", "\"count\"", ",", "20", ")", ",", "values", ",", "int", ")", ")" ]
[ 15, 0 ]
[ 40, 98 ]
null
python
de
['de', 'de', 'de']
True
true
null
AnnotationTokenizer.t_VORGETRAGEN
(self, t)
return t
r'vorgetragen
r'vorgetragen
def t_VORGETRAGEN(self, t): r'vorgetragen' return t
[ "def", "t_VORGETRAGEN", "(", "self", ",", "t", ")", ":", "return", "t" ]
[ 470, 4 ]
[ 472, 16 ]
null
python
de
['de', 'de', 'de']
False
true
null
Arc.get_nodes
(self, parts=8, render=False)
return (p for p in points_on_arc(self.center, self.radius, self.startangle, self.endangle, parts=parts))
Die Funktion liefert eine Liste von virtuellen Nodes, welche man zum Rechnen der convex_hull() benötigt.
Die Funktion liefert eine Liste von virtuellen Nodes, welche man zum Rechnen der convex_hull() benötigt.
def get_nodes(self, parts=8, render=False): """ Die Funktion liefert eine Liste von virtuellen Nodes, welche man zum Rechnen der convex_hull() benötigt. """ if render and self.rtheta is not None: theta = np.arange(self.start_param, self.end_param, 0.1) x = 0.5 * self.width * np.cos(theta) y = 0.5 * self.height * np.sin(theta) R = np.array([ [np.cos(self.rtheta), -np.sin(self.rtheta)], [np.sin(self.rtheta), np.cos(self.rtheta)]]) x, y = np.dot(R, np.array([x, y])) x += self.center[0] y += self.center[1] nodes = list(zip(x, y)) nodes.append(self.p2) return nodes return (p for p in points_on_arc(self.center, self.radius, self.startangle, self.endangle, parts=parts))
[ "def", "get_nodes", "(", "self", ",", "parts", "=", "8", ",", "render", "=", "False", ")", ":", "if", "render", "and", "self", ".", "rtheta", "is", "not", "None", ":", "theta", "=", "np", ".", "arange", "(", "self", ".", "start_param", ",", "self", ".", "end_param", ",", "0.1", ")", "x", "=", "0.5", "*", "self", ".", "width", "*", "np", ".", "cos", "(", "theta", ")", "y", "=", "0.5", "*", "self", ".", "height", "*", "np", ".", "sin", "(", "theta", ")", "R", "=", "np", ".", "array", "(", "[", "[", "np", ".", "cos", "(", "self", ".", "rtheta", ")", ",", "-", "np", ".", "sin", "(", "self", ".", "rtheta", ")", "]", ",", "[", "np", ".", "sin", "(", "self", ".", "rtheta", ")", ",", "np", ".", "cos", "(", "self", ".", "rtheta", ")", "]", "]", ")", "x", ",", "y", "=", "np", ".", "dot", "(", "R", ",", "np", ".", "array", "(", "[", "x", ",", "y", "]", ")", ")", "x", "+=", "self", ".", "center", "[", "0", "]", "y", "+=", "self", ".", "center", "[", "1", "]", "nodes", "=", "list", "(", "zip", "(", "x", ",", "y", ")", ")", "nodes", ".", "append", "(", "self", ".", "p2", ")", "return", "nodes", "return", "(", "p", "for", "p", "in", "points_on_arc", "(", "self", ".", "center", ",", "self", ".", "radius", ",", "self", ".", "startangle", ",", "self", ".", "endangle", ",", "parts", "=", "parts", ")", ")" ]
[ 906, 4 ]
[ 929, 54 ]
null
python
de
['de', 'de', 'de']
True
true
null
setup_client
()
return server_instance.test_client()
Flask Server Testclient einrichten. erstellt, eine neue Flask Server Instanz, konfiguriert diese zum Testen und erstellten einen Test Client. :return: einen Server Test Client. :rtype: FlaskClient
Flask Server Testclient einrichten.
def setup_client(): """Flask Server Testclient einrichten. erstellt, eine neue Flask Server Instanz, konfiguriert diese zum Testen und erstellten einen Test Client. :return: einen Server Test Client. :rtype: FlaskClient """ server_instance = server.create_app() server_instance.config["Testing"] = True server_instance.config['DEBUG'] = False return server_instance.test_client()
[ "def", "setup_client", "(", ")", ":", "server_instance", "=", "server", ".", "create_app", "(", ")", "server_instance", ".", "config", "[", "\"Testing\"", "]", "=", "True", "server_instance", ".", "config", "[", "'DEBUG'", "]", "=", "False", "return", "server_instance", ".", "test_client", "(", ")" ]
[ 4, 0 ]
[ 17, 40 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispConfig.set
(self, setkeys:str=None, value=None)
Einen Wert in der Konfiguration ablegen. Parameters ---------- setkeys : str|list Bezeichner dessen Inhalt gesetzt wird . operator für die tiefe value : Zu setzener Inhalt
Einen Wert in der Konfiguration ablegen.
def set(self, setkeys:str=None, value=None): """Einen Wert in der Konfiguration ablegen. Parameters ---------- setkeys : str|list Bezeichner dessen Inhalt gesetzt wird . operator für die tiefe value : Zu setzener Inhalt """ # Startpunkt ist die config selbst here = self._config # setkeys in list umwandeln keys = [] if isinstance(setkeys, str): keys = setkeys.split(".") elif isinstance(setkeys, list): keys = setkeys # For every key *before* the last one, we concentrate on navigating through the dictionary. for key in keys[:-1]: # Try to find here[key]. If it doesn't exist, create it with an empty DotMap. # Then, update our `here` pointer to refer to the thing we just found (or created). here = here.setdefault(key, DotMap() ) # Finally, set the final key to the given value here[keys[-1]] = value
[ "def", "set", "(", "self", ",", "setkeys", ":", "str", "=", "None", ",", "value", "=", "None", ")", ":", "# Startpunkt ist die config selbst", "here", "=", "self", ".", "_config", "# setkeys in list umwandeln", "keys", "=", "[", "]", "if", "isinstance", "(", "setkeys", ",", "str", ")", ":", "keys", "=", "setkeys", ".", "split", "(", "\".\"", ")", "elif", "isinstance", "(", "setkeys", ",", "list", ")", ":", "keys", "=", "setkeys", "# For every key *before* the last one, we concentrate on navigating through the dictionary.", "for", "key", "in", "keys", "[", ":", "-", "1", "]", ":", "# Try to find here[key]. If it doesn't exist, create it with an empty DotMap.", "# Then, update our `here` pointer to refer to the thing we just found (or created).", "here", "=", "here", ".", "setdefault", "(", "key", ",", "DotMap", "(", ")", ")", "# Finally, set the final key to the given value", "here", "[", "keys", "[", "-", "1", "]", "]", "=", "value" ]
[ 519, 4 ]
[ 547, 30 ]
null
python
de
['de', 'de', 'de']
True
true
null
Monat.name
(self)
return self.__name
Der Name des jeweiligen Monats
Der Name des jeweiligen Monats
def name(self) -> Monatsname: """Der Name des jeweiligen Monats""" return self.__name
[ "def", "name", "(", "self", ")", "->", "Monatsname", ":", "return", "self", ".", "__name" ]
[ 55, 4 ]
[ 57, 26 ]
null
python
de
['de', 'de', 'de']
True
true
null
_bezierKreisNR
(n,r)
return [ [ [r*P[0], r*P[1]] for P in k] for k in kurven]
Kreis mit Radius r um Nullpunkte in Bezierkurven, Quadranten n-fach geteilt
Kreis mit Radius r um Nullpunkte in Bezierkurven, Quadranten n-fach geteilt
def _bezierKreisNR(n,r): '''Kreis mit Radius r um Nullpunkte in Bezierkurven, Quadranten n-fach geteilt''' kurven = _bezierKreisN(n) return [ [ [r*P[0], r*P[1]] for P in k] for k in kurven]
[ "def", "_bezierKreisNR", "(", "n", ",", "r", ")", ":", "kurven", "=", "_bezierKreisN", "(", "n", ")", "return", "[", "[", "[", "r", "*", "P", "[", "0", "]", ",", "r", "*", "P", "[", "1", "]", "]", "for", "P", "in", "k", "]", "for", "k", "in", "kurven", "]" ]
[ 64, 0 ]
[ 67, 60 ]
null
python
de
['de', 'de', 'de']
True
true
null
join
(values: dict, data: StepData)
Fügt Elemente einer Liste zu einem String zusammen mit jeweils einem Delimiter dazwischen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return:
Fügt Elemente einer Liste zu einem String zusammen mit jeweils einem Delimiter dazwischen.
def join(values: dict, data: StepData): """Fügt Elemente einer Liste zu einem String zusammen mit jeweils einem Delimiter dazwischen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return: """ for idx, key in data.loop_key(values["keys"], values): value = data.get_data(key, values) new_key = get_new_keys(values, idx) delimiter = data.format(values.get("delimiter", ""), values) new_value = delimiter.join(value) data.insert_data(new_key, new_value, values)
[ "def", "join", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "value", "=", "data", ".", "get_data", "(", "key", ",", "values", ")", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "delimiter", "=", "data", ".", "format", "(", "values", ".", "get", "(", "\"delimiter\"", ",", "\"\"", ")", ",", "values", ")", "new_value", "=", "delimiter", ".", "join", "(", "value", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")" ]
[ 601, 0 ]
[ 615, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
MQTTclass.doSignal
(self, msgObj:dict=None)
Message über signal weiterleiten, <MQTT_BASE> wird dabei aus topic entfernt. .. code:: <MQTT_BASE>/stat/# Parameters ---------- msg : dict, optional dict mit payload. The default is None. .. code:: - topic: <MQTT_BASE>/stat/# - payload: beliebig Returns ------- None.
Message über signal weiterleiten, <MQTT_BASE> wird dabei aus topic entfernt.
def doSignal(self, msgObj:dict=None): """Message über signal weiterleiten, <MQTT_BASE> wird dabei aus topic entfernt. .. code:: <MQTT_BASE>/stat/# Parameters ---------- msg : dict, optional dict mit payload. The default is None. .. code:: - topic: <MQTT_BASE>/stat/# - payload: beliebig Returns ------- None. """ self.signal.send( self.decodeMsgObj( msgObj, basetopicReplace = True ) )
[ "def", "doSignal", "(", "self", ",", "msgObj", ":", "dict", "=", "None", ")", ":", "self", ".", "signal", ".", "send", "(", "self", ".", "decodeMsgObj", "(", "msgObj", ",", "basetopicReplace", "=", "True", ")", ")" ]
[ 687, 4 ]
[ 708, 80 ]
null
python
de
['de', 'de', 'de']
True
true
null
calculate_mean
(values: dict, data: StepData)
Berechnet den Mittelwert von Werten, die in einem Array stehen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return:
Berechnet den Mittelwert von Werten, die in einem Array stehen.
def calculate_mean(values: dict, data: StepData): """Berechnet den Mittelwert von Werten, die in einem Array stehen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API :return: """ for idx, key in data.loop_key(values["keys"], values): value = data.get_data(key, values) inner_key = values.get("innerKey", None) if inner_key: value = [reduce(operator.getitem, inner_key[0].split('|'), x) for x in value] new_key = get_new_keys(values, idx) mean_value = float(np.mean(value)) if values.get("decimal", None): new_value = round(mean_value, data.get_data(values["decimal"], values, numbers.Number)) else: new_value = round(mean_value) data.insert_data(new_key, new_value, values)
[ "def", "calculate_mean", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "value", "=", "data", ".", "get_data", "(", "key", ",", "values", ")", "inner_key", "=", "values", ".", "get", "(", "\"innerKey\"", ",", "None", ")", "if", "inner_key", ":", "value", "=", "[", "reduce", "(", "operator", ".", "getitem", ",", "inner_key", "[", "0", "]", ".", "split", "(", "'|'", ")", ",", "x", ")", "for", "x", "in", "value", "]", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "mean_value", "=", "float", "(", "np", ".", "mean", "(", "value", ")", ")", "if", "values", ".", "get", "(", "\"decimal\"", ",", "None", ")", ":", "new_value", "=", "round", "(", "mean_value", ",", "data", ".", "get_data", "(", "values", "[", "\"decimal\"", "]", ",", "values", ",", "numbers", ".", "Number", ")", ")", "else", ":", "new_value", "=", "round", "(", "mean_value", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")" ]
[ 29, 0 ]
[ 47, 52 ]
null
python
de
['de', 'de', 'de']
True
true
null
ReloadSettings
(jsonData)
return
Erneutes Laden der Einstellungen
Erneutes Laden der Einstellungen
def ReloadSettings(jsonData): ''' Erneutes Laden der Einstellungen ''' global myScriptSettings, mySettingsFile global myLogger # Execute json reloading here myScriptSettings.__dict__ = json.loads(jsonData) myScriptSettings.Save(mySettingsFile) # Meldung ins Log schreiben myLogger.WriteLog("") myLogger.WriteLog( " ---------------------------------------------------------------") myLogger.WriteLog( " ---- Konfiguration neu eingelesen ----") myLogger.WriteLog( " ---------------------------------------------------------------") myLogger.WriteLog("") return
[ "def", "ReloadSettings", "(", "jsonData", ")", ":", "global", "myScriptSettings", ",", "mySettingsFile", "global", "myLogger", "# Execute json reloading here\r", "myScriptSettings", ".", "__dict__", "=", "json", ".", "loads", "(", "jsonData", ")", "myScriptSettings", ".", "Save", "(", "mySettingsFile", ")", "# Meldung ins Log schreiben\r", "myLogger", ".", "WriteLog", "(", "\"\"", ")", "myLogger", ".", "WriteLog", "(", "\" ---------------------------------------------------------------\"", ")", "myLogger", ".", "WriteLog", "(", "\" ---- Konfiguration neu eingelesen ----\"", ")", "myLogger", ".", "WriteLog", "(", "\" ---------------------------------------------------------------\"", ")", "myLogger", ".", "WriteLog", "(", "\"\"", ")", "return" ]
[ 174, 0 ]
[ 193, 10 ]
null
python
de
['de', 'de', 'de']
True
true
null
PdfGenerator.html
( self, html="", area:dict={}, attrs:dict={}, render=None )
return element_html
HTML einfügen. Parameters ---------- html : str area : Area {left,top,with,height} die Größe der Ausgabe attrs : dict zu ändernde id class oder Style Angaben render : bool sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet Returns ------- element_html: str HTML des erzeugten Elements
HTML einfügen.
def html( self, html="", area:dict={}, attrs:dict={}, render=None ): """HTML einfügen. Parameters ---------- html : str area : Area {left,top,with,height} die Größe der Ausgabe attrs : dict zu ändernde id class oder Style Angaben render : bool sofort rendern oder nur zurückgeben ohne Angabe wird self.autoRender verwendet Returns ------- element_html: str HTML des erzeugten Elements """ if render == None: render = self.autoRender # Eigenschaften des Elements _id, _class, _style = self._get_attrs( attrs ) _area = self._get_area_style( area ) element_html = '\n\t<div class="html {_class}" style="{_style} {_area}" >{content}</div>'.format( _class = _class, _style = _style, _area = _area, content = html ) if render: self._html( element_html ) return element_html
[ "def", "html", "(", "self", ",", "html", "=", "\"\"", ",", "area", ":", "dict", "=", "{", "}", ",", "attrs", ":", "dict", "=", "{", "}", ",", "render", "=", "None", ")", ":", "if", "render", "==", "None", ":", "render", "=", "self", ".", "autoRender", "# Eigenschaften des Elements", "_id", ",", "_class", ",", "_style", "=", "self", ".", "_get_attrs", "(", "attrs", ")", "_area", "=", "self", ".", "_get_area_style", "(", "area", ")", "element_html", "=", "'\\n\\t<div class=\"html {_class}\" style=\"{_style} {_area}\" >{content}</div>'", ".", "format", "(", "_class", "=", "_class", ",", "_style", "=", "_style", ",", "_area", "=", "_area", ",", "content", "=", "html", ")", "if", "render", ":", "self", ".", "_html", "(", "element_html", ")", "return", "element_html" ]
[ 824, 4 ]
[ 860, 27 ]
null
python
de
['de', 'de', 'de']
False
true
null
default
(values: dict, data: StepData, config: dict)
Generiert eine Audiodatei mit der Python-Bibliothek gTTS. Wenn in der Konfiguration `sub_pairs` angegeben sind, werden diese den bisherigen `sub_pairs` hinzugefügt. `sub_pairs` sind bestimmte Wörter, die im Text ersetzt werden sollen. Beispiel: "z.B." soll vorgelesen werden als "zum Beispiel". :param values: Werte aus der JSON-Datei :param data: Daten aus der API :param config: Daten aus der Konfigurationsdatei :return:
Generiert eine Audiodatei mit der Python-Bibliothek gTTS.
def default(values: dict, data: StepData, config: dict): """Generiert eine Audiodatei mit der Python-Bibliothek gTTS. Wenn in der Konfiguration `sub_pairs` angegeben sind, werden diese den bisherigen `sub_pairs` hinzugefügt. `sub_pairs` sind bestimmte Wörter, die im Text ersetzt werden sollen. Beispiel: "z.B." soll vorgelesen werden als "zum Beispiel". :param values: Werte aus der JSON-Datei :param data: Daten aus der API :param config: Daten aus der Konfigurationsdatei :return: """ for key in values: text = part.audio_parts(values[key]["parts"], data) if text[1]: # wird ausgeführt, falls die Audio nur aus angegebenem Text besteht values[key] = _text_to_audio(data, values, text[0], config) else: # wird ausgeführt, falls die Audio auch statische Dateien oder lautlose Audios enthält audio_list = [] for item in values[key]["parts"]: if item["type"] == "text": audio_list.append(_text_to_audio(data, values, item["pattern"], config)) if item["type"] == "file": audio_list.append(resources.get_audio_path(item["path"])) if item["type"] == "random_text": len_pattern = len(item["pattern"]) if len_pattern == 1: audio_list.append(_text_to_audio(data, values, item["pattern"][0], config)) else: rand = randint(0, len_pattern - 1) audio_list.append(_text_to_audio(data, values, item["pattern"][rand], config)) if item["type"] == "silent": duration = item["duration"] * 1000 silence = AudioSegment.silent(duration=duration) silent_audio_file_path = resources.new_temp_resource_path(data.data["_pipe_id"], "mp3") silence.export(silent_audio_file_path, format="mp3") audio_list.append(silent_audio_file_path) # values[key] = _audios_to_audio(audio_list, data) values[key] = combine_audios(audio_list, data)
[ "def", "default", "(", "values", ":", "dict", ",", "data", ":", "StepData", ",", "config", ":", "dict", ")", ":", "for", "key", "in", "values", ":", "text", "=", "part", ".", "audio_parts", "(", "values", "[", "key", "]", "[", "\"parts\"", "]", ",", "data", ")", "if", "text", "[", "1", "]", ":", "# wird ausgeführt, falls die Audio nur aus angegebenem Text besteht", "values", "[", "key", "]", "=", "_text_to_audio", "(", "data", ",", "values", ",", "text", "[", "0", "]", ",", "config", ")", "else", ":", "# wird ausgeführt, falls die Audio auch statische Dateien oder lautlose Audios enthält", "audio_list", "=", "[", "]", "for", "item", "in", "values", "[", "key", "]", "[", "\"parts\"", "]", ":", "if", "item", "[", "\"type\"", "]", "==", "\"text\"", ":", "audio_list", ".", "append", "(", "_text_to_audio", "(", "data", ",", "values", ",", "item", "[", "\"pattern\"", "]", ",", "config", ")", ")", "if", "item", "[", "\"type\"", "]", "==", "\"file\"", ":", "audio_list", ".", "append", "(", "resources", ".", "get_audio_path", "(", "item", "[", "\"path\"", "]", ")", ")", "if", "item", "[", "\"type\"", "]", "==", "\"random_text\"", ":", "len_pattern", "=", "len", "(", "item", "[", "\"pattern\"", "]", ")", "if", "len_pattern", "==", "1", ":", "audio_list", ".", "append", "(", "_text_to_audio", "(", "data", ",", "values", ",", "item", "[", "\"pattern\"", "]", "[", "0", "]", ",", "config", ")", ")", "else", ":", "rand", "=", "randint", "(", "0", ",", "len_pattern", "-", "1", ")", "audio_list", ".", "append", "(", "_text_to_audio", "(", "data", ",", "values", ",", "item", "[", "\"pattern\"", "]", "[", "rand", "]", ",", "config", ")", ")", "if", "item", "[", "\"type\"", "]", "==", "\"silent\"", ":", "duration", "=", "item", "[", "\"duration\"", "]", "*", "1000", "silence", "=", "AudioSegment", ".", "silent", "(", "duration", "=", "duration", ")", "silent_audio_file_path", "=", "resources", ".", "new_temp_resource_path", "(", "data", ".", "data", "[", "\"_pipe_id\"", "]", ",", 
"\"mp3\"", ")", "silence", ".", "export", "(", "silent_audio_file_path", ",", "format", "=", "\"mp3\"", ")", "audio_list", ".", "append", "(", "silent_audio_file_path", ")", "# values[key] = _audios_to_audio(audio_list, data)", "values", "[", "key", "]", "=", "combine_audios", "(", "audio_list", ",", "data", ")" ]
[ 60, 0 ]
[ 100, 58 ]
null
python
de
['de', 'de', 'de']
True
true
null
calculate
(values: dict, data: StepData)
Berechnet die angegebene `"action"`. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Berechnet die angegebene `"action"`.
def calculate(values: dict, data: StepData): """Berechnet die angegebene `"action"`. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ action_func = get_type_func(values, CALCULATE_ACTIONS, "action") action_func(values, data)
[ "def", "calculate", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "action_func", "=", "get_type_func", "(", "values", ",", "CALCULATE_ACTIONS", ",", "\"action\"", ")", "action_func", "(", "values", ",", "data", ")" ]
[ 74, 0 ]
[ 82, 29 ]
null
python
de
['de', 'de', 'de']
True
true
null
handle_setup
(daten)
konfiguriere einen Pin
konfiguriere einen Pin
def handle_setup(daten): """konfiguriere einen Pin""" global konfiguration if type(daten) == str: daten = json.loads(daten) if validiere_daten(daten, "pin", "richtung"): richtung = daten["richtung"] pin = daten["pin"] # ueberpruefe, ob die Richtung zulaessig ist if richtung not in [GPIO.IN, GPIO.OUT]: emit("fehler", {"text": "Ungueltige Richtung"}) return GPIO.setup(pin, richtung) # loesche die Callback-Funktion fuer den Pin, wenn er als Eingang konfiguriert wurde, # da eine Callback-Funktion nicht zweimal fuer einen Pin hinzugefuegt werden darf if pin in konfiguration["pins"] and konfiguration["pins"][pin]["richtung"] == GPIO.IN: GPIO.remove_event_detect(pin) if richtung == GPIO.IN: # setze die Callback-Funktion, sodass die Benutzer benachrichtigt werden, # falls sich der Status des Pins aendert GPIO.add_event_detect(pin, GPIO.BOTH, callback=input_callback) # sende allen Benutzern die Konfiguration des Pins status = GPIO.input(pin) konfiguration["pins"][pin] = {"richtung": richtung, "status": status} socketio.emit("setup", {"pin": pin, "richtung": richtung, "status": status})
[ "def", "handle_setup", "(", "daten", ")", ":", "global", "konfiguration", "if", "type", "(", "daten", ")", "==", "str", ":", "daten", "=", "json", ".", "loads", "(", "daten", ")", "if", "validiere_daten", "(", "daten", ",", "\"pin\"", ",", "\"richtung\"", ")", ":", "richtung", "=", "daten", "[", "\"richtung\"", "]", "pin", "=", "daten", "[", "\"pin\"", "]", "# ueberpruefe, ob die Richtung zulaessig ist", "if", "richtung", "not", "in", "[", "GPIO", ".", "IN", ",", "GPIO", ".", "OUT", "]", ":", "emit", "(", "\"fehler\"", ",", "{", "\"text\"", ":", "\"Ungueltige Richtung\"", "}", ")", "return", "GPIO", ".", "setup", "(", "pin", ",", "richtung", ")", "# loesche die Callback-Funktion fuer den Pin, wenn er als Eingang konfiguriert wurde,", "# da eine Callback-Funktion nicht zweimal fuer einen Pin hinzugefuegt werden darf", "if", "pin", "in", "konfiguration", "[", "\"pins\"", "]", "and", "konfiguration", "[", "\"pins\"", "]", "[", "pin", "]", "[", "\"richtung\"", "]", "==", "GPIO", ".", "IN", ":", "GPIO", ".", "remove_event_detect", "(", "pin", ")", "if", "richtung", "==", "GPIO", ".", "IN", ":", "# setze die Callback-Funktion, sodass die Benutzer benachrichtigt werden,", "# falls sich der Status des Pins aendert", "GPIO", ".", "add_event_detect", "(", "pin", ",", "GPIO", ".", "BOTH", ",", "callback", "=", "input_callback", ")", "# sende allen Benutzern die Konfiguration des Pins", "status", "=", "GPIO", ".", "input", "(", "pin", ")", "konfiguration", "[", "\"pins\"", "]", "[", "pin", "]", "=", "{", "\"richtung\"", ":", "richtung", ",", "\"status\"", ":", "status", "}", "socketio", ".", "emit", "(", "\"setup\"", ",", "{", "\"pin\"", ":", "pin", ",", "\"richtung\"", ":", "richtung", ",", "\"status\"", ":", "status", "}", ")" ]
[ 161, 0 ]
[ 185, 84 ]
null
python
de
['de', 'de', 'de']
True
true
null
qa_mlc.FWHM_findLeafs
( self, leafs:list=None, lfd:int=0, variante:str="" )
return { 'lfd': lfd, 'filename': self.infos["filename"], 'Kennung': self._kennung.format( **self.infos ), 'checkPositions': checkPositions, 'Richtung' : richtung, 'Datum': self.infos['AcquisitionDateTime'], 'unit': self.infos['unit'], 'energy': self.infos['energy'], 'gantry' : self.infos['gantry'], 'collimator': self.infos['collimator'], "fwxm.data" : leafData["fwxm"], "fwxm.min" : np.min( fwxm_array ), "fwxm.max" : np.max( fwxm_array ), "fwxm.mean" : np.mean( fwxm_array ), "shift.data" : leafData["shift"], "shift.min" : np.min( shift_array ), "shift.max" : np.max( shift_array ), "shift.mean" : np.mean( shift_array ) }
Öffungsbreite und Position aller Leafpaare bestimmen Sucht über die Mitte jedes Leafpaares die Öffnugsbreite und Positionsabweichung zum Zentrum bei 50% Halbschatten Gibt die Werte für jedes leafpaar und min,max,mean für alle zurück Parameters ---------- leafs : list, optional Auszuwertende Leafnummern bei none werden 1-60 verwendet. The default is None. lfd : int, optional lfd Angabe in der Rückgabe. The default is 0. variante : str, optional durchzuführende testvariante. The default is "". Returns ------- dict lfd: int - Angabe aus Parameters filename: str - filename aus infos Kennung: checkPositions: list - die geprüften Positionen Richtung: str ('klein > groß'|'groß > klein') - über infos variante bestimmt Datum unit energy gantry collimator fwxm.data fwxm.min fwxm.max fwxm.mean: shift.data: shift.min: shift.max: shift.mean.
Öffungsbreite und Position aller Leafpaare bestimmen
def FWHM_findLeafs( self, leafs:list=None, lfd:int=0, variante:str="" ): """Öffungsbreite und Position aller Leafpaare bestimmen Sucht über die Mitte jedes Leafpaares die Öffnugsbreite und Positionsabweichung zum Zentrum bei 50% Halbschatten Gibt die Werte für jedes leafpaar und min,max,mean für alle zurück Parameters ---------- leafs : list, optional Auszuwertende Leafnummern bei none werden 1-60 verwendet. The default is None. lfd : int, optional lfd Angabe in der Rückgabe. The default is 0. variante : str, optional durchzuführende testvariante. The default is "". Returns ------- dict lfd: int - Angabe aus Parameters filename: str - filename aus infos Kennung: checkPositions: list - die geprüften Positionen Richtung: str ('klein > groß'|'groß > klein') - über infos variante bestimmt Datum unit energy gantry collimator fwxm.data fwxm.min fwxm.max fwxm.mean: shift.data: shift.min: shift.max: shift.mean. """ # alle Leaf center Positionen bestimmen checkPositions = self.getLeafCenterPositions() if leafs is None: # Leafs 1 bis 60, bei arrange 61 damit 60 im Ergebnis ist leafs = np.arange( 1, 61, 1 ) leafData = { "fwxm": {}, "shift": {} } # je nach subTag Richtung angeben richtung = "" if self.infos["varianten"][ variante ] == "vonklein": richtung = "klein > groß" elif self.infos["varianten"][ variante ] == "vongross": richtung = "groß > klein" # Aufbereitung für dataframe #df_data = {} leaf = 0 for p in checkPositions: leaf = leaf + 1 # nur wenn die Leafnummer ausgewertet werden soll if leaf in leafs: # Abhängigkeit von der Kollimatorrotation if self.infos["collimator"] == 90 or self.infos["collimator"] == 270: # umdrehen für 270 oder 90 profile = SingleProfile( self.image.array[ : , self.image.mm2dots_X( p ) ] ) else: profile = SingleProfile( self.image.array[ self.image.mm2dots_Y( p ) ] ) # Abstand der Lamellen bei 50% leafData["fwxm"][p] = profile.fwxm( ) / self.image.dpmm # Zentrumsversatz bei 50% bestimmen leafData["shift"][p] = ( (len(profile.values) / 2) - 
profile.fwxm_center( ) ) / self.image.dpmm # die eigentlichen Werte in ein array übernehmen fwxm_array = np.array( list( leafData["fwxm"].values() ) ) shift_array = np.array( list( leafData["shift"].values() ) ) # Daten so zurückgeben das sofort ein dataframe möglich ist return { 'lfd': lfd, 'filename': self.infos["filename"], 'Kennung': self._kennung.format( **self.infos ), 'checkPositions': checkPositions, 'Richtung' : richtung, 'Datum': self.infos['AcquisitionDateTime'], 'unit': self.infos['unit'], 'energy': self.infos['energy'], 'gantry' : self.infos['gantry'], 'collimator': self.infos['collimator'], "fwxm.data" : leafData["fwxm"], "fwxm.min" : np.min( fwxm_array ), "fwxm.max" : np.max( fwxm_array ), "fwxm.mean" : np.mean( fwxm_array ), "shift.data" : leafData["shift"], "shift.min" : np.min( shift_array ), "shift.max" : np.max( shift_array ), "shift.mean" : np.mean( shift_array ) }
[ "def", "FWHM_findLeafs", "(", "self", ",", "leafs", ":", "list", "=", "None", ",", "lfd", ":", "int", "=", "0", ",", "variante", ":", "str", "=", "\"\"", ")", ":", "# alle Leaf center Positionen bestimmen", "checkPositions", "=", "self", ".", "getLeafCenterPositions", "(", ")", "if", "leafs", "is", "None", ":", "# Leafs 1 bis 60, bei arrange 61 damit 60 im Ergebnis ist", "leafs", "=", "np", ".", "arange", "(", "1", ",", "61", ",", "1", ")", "leafData", "=", "{", "\"fwxm\"", ":", "{", "}", ",", "\"shift\"", ":", "{", "}", "}", "# je nach subTag Richtung angeben", "richtung", "=", "\"\"", "if", "self", ".", "infos", "[", "\"varianten\"", "]", "[", "variante", "]", "==", "\"vonklein\"", ":", "richtung", "=", "\"klein > groß\"", "elif", "self", ".", "infos", "[", "\"varianten\"", "]", "[", "variante", "]", "==", "\"vongross\"", ":", "richtung", "=", "\"groß > klein\"", "# Aufbereitung für dataframe", "#df_data = {}", "leaf", "=", "0", "for", "p", "in", "checkPositions", ":", "leaf", "=", "leaf", "+", "1", "# nur wenn die Leafnummer ausgewertet werden soll", "if", "leaf", "in", "leafs", ":", "# Abhängigkeit von der Kollimatorrotation", "if", "self", ".", "infos", "[", "\"collimator\"", "]", "==", "90", "or", "self", ".", "infos", "[", "\"collimator\"", "]", "==", "270", ":", "# umdrehen für 270 oder 90", "profile", "=", "SingleProfile", "(", "self", ".", "image", ".", "array", "[", ":", ",", "self", ".", "image", ".", "mm2dots_X", "(", "p", ")", "]", ")", "else", ":", "profile", "=", "SingleProfile", "(", "self", ".", "image", ".", "array", "[", "self", ".", "image", ".", "mm2dots_Y", "(", "p", ")", "]", ")", "# Abstand der Lamellen bei 50%", "leafData", "[", "\"fwxm\"", "]", "[", "p", "]", "=", "profile", ".", "fwxm", "(", ")", "/", "self", ".", "image", ".", "dpmm", "# Zentrumsversatz bei 50% bestimmen", "leafData", "[", "\"shift\"", "]", "[", "p", "]", "=", "(", "(", "len", "(", "profile", ".", "values", ")", "/", "2", ")", "-", "profile", ".", "fwxm_center", "(", 
")", ")", "/", "self", ".", "image", ".", "dpmm", "# die eigentlichen Werte in ein array übernehmen", "fwxm_array", "=", "np", ".", "array", "(", "list", "(", "leafData", "[", "\"fwxm\"", "]", ".", "values", "(", ")", ")", ")", "shift_array", "=", "np", ".", "array", "(", "list", "(", "leafData", "[", "\"shift\"", "]", ".", "values", "(", ")", ")", ")", "# Daten so zurückgeben das sofort ein dataframe möglich ist", "return", "{", "'lfd'", ":", "lfd", ",", "'filename'", ":", "self", ".", "infos", "[", "\"filename\"", "]", ",", "'Kennung'", ":", "self", ".", "_kennung", ".", "format", "(", "*", "*", "self", ".", "infos", ")", ",", "'checkPositions'", ":", "checkPositions", ",", "'Richtung'", ":", "richtung", ",", "'Datum'", ":", "self", ".", "infos", "[", "'AcquisitionDateTime'", "]", ",", "'unit'", ":", "self", ".", "infos", "[", "'unit'", "]", ",", "'energy'", ":", "self", ".", "infos", "[", "'energy'", "]", ",", "'gantry'", ":", "self", ".", "infos", "[", "'gantry'", "]", ",", "'collimator'", ":", "self", ".", "infos", "[", "'collimator'", "]", ",", "\"fwxm.data\"", ":", "leafData", "[", "\"fwxm\"", "]", ",", "\"fwxm.min\"", ":", "np", ".", "min", "(", "fwxm_array", ")", ",", "\"fwxm.max\"", ":", "np", ".", "max", "(", "fwxm_array", ")", ",", "\"fwxm.mean\"", ":", "np", ".", "mean", "(", "fwxm_array", ")", ",", "\"shift.data\"", ":", "leafData", "[", "\"shift\"", "]", ",", "\"shift.min\"", ":", "np", ".", "min", "(", "shift_array", ")", ",", "\"shift.max\"", ":", "np", ".", "max", "(", "shift_array", ")", ",", "\"shift.mean\"", ":", "np", ".", "mean", "(", "shift_array", ")", "}" ]
[ 226, 4 ]
[ 330, 9 ]
null
python
de
['de', 'de', 'de']
True
true
null
calculate_min
(values: dict, data: StepData)
Findet den Minimalwert von Werten, die in einem Array stehen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API
Findet den Minimalwert von Werten, die in einem Array stehen.
def calculate_min(values: dict, data: StepData): """Findet den Minimalwert von Werten, die in einem Array stehen. :param values: Werte aus der JSON-Datei :param data: Daten aus der API """ for idx, key in data.loop_key(values["keys"], values): value = data.get_data(key, values) inner_key = values.get("innerKey", None) if inner_key: value = [reduce(operator.getitem, inner_key[0].split('|'), x) for x in value] new_key = get_new_keys(values, idx) new_value = min(value) if values.get("decimal", None): new_value = round(new_value, data.get_data(values["decimal"], values, numbers.Number)) else: new_value = round(new_value, 2) data.insert_data(new_key, new_value, values) if values.get("save_idx_to", None): data.insert_data(values["save_idx_to"][idx], value.index(new_value), values)
[ "def", "calculate_min", "(", "values", ":", "dict", ",", "data", ":", "StepData", ")", ":", "for", "idx", ",", "key", "in", "data", ".", "loop_key", "(", "values", "[", "\"keys\"", "]", ",", "values", ")", ":", "value", "=", "data", ".", "get_data", "(", "key", ",", "values", ")", "inner_key", "=", "values", ".", "get", "(", "\"innerKey\"", ",", "None", ")", "if", "inner_key", ":", "value", "=", "[", "reduce", "(", "operator", ".", "getitem", ",", "inner_key", "[", "0", "]", ".", "split", "(", "'|'", ")", ",", "x", ")", "for", "x", "in", "value", "]", "new_key", "=", "get_new_keys", "(", "values", ",", "idx", ")", "new_value", "=", "min", "(", "value", ")", "if", "values", ".", "get", "(", "\"decimal\"", ",", "None", ")", ":", "new_value", "=", "round", "(", "new_value", ",", "data", ".", "get_data", "(", "values", "[", "\"decimal\"", "]", ",", "values", ",", "numbers", ".", "Number", ")", ")", "else", ":", "new_value", "=", "round", "(", "new_value", ",", "2", ")", "data", ".", "insert_data", "(", "new_key", ",", "new_value", ",", "values", ")", "if", "values", ".", "get", "(", "\"save_idx_to\"", ",", "None", ")", ":", "data", ".", "insert_data", "(", "values", "[", "\"save_idx_to\"", "]", "[", "idx", "]", ",", "value", ".", "index", "(", "new_value", ")", ",", "values", ")" ]
[ 99, 0 ]
[ 119, 88 ]
null
python
de
['de', 'de', 'de']
True
true
null
OpenWeatherMap.show
(self)
Anzeige des Wetters
Anzeige des Wetters
def show(self): """ Anzeige des Wetters """ self.get_data() SS_WEATHER_WIN.fill(colors['weather_bg']) if bg_buf['weather_bg']: SS_WEATHER_WIN.blit(bg_buf['weather_bg'], (0, 0), area=SS_WEATHER_RECT) fc_height = fonts['big'].get_height() // 4 draw_text(LCD, 'Wetter für ' + self.stadt, fonts['big'], colors['weather_font'], align='centerx', pos=(0, fc_height)) fc_height = fonts['big'].get_height() * 5 // 4 draw_text(LCD, 'Jetzt: ' + self.temperatur + '°C' + ' / ' + \ self.luftdruck + 'mb' + ' / ' + self.luftfeuchte + '%rel.', fonts['big'], colors['weather_font'], align='centerx', pos=(0, fc_height)) fc_height = fc_height + fonts['big'].get_height() draw_text(LCD, 'Heute', fonts['std'], colors['weather_font'], align='centerx', pos=(-SS_WEATHER_WIN.get_width() // 4, fc_height)) draw_text(LCD, 'Morgen', fonts['std'], colors['weather_font'], align='centerx', pos=(SS_WEATHER_WIN.get_width() // 4, fc_height)) icon = os.path.join(WEATHERPATH, self.wetterlage + '.png') if not os.path.exists(icon): icon = os.path.join(WEATHERPATH, 'na.png') icon2 = os.path.join(WEATHERPATH, self.vorschau + '.png') if not os.path.exists(icon2): icon2 = os.path.join(WEATHERPATH, 'na.png') icon = pygame.image.load(icon).convert_alpha() icon2 = pygame.image.load(icon2).convert_alpha() icon = pygame.transform.smoothscale(icon, (SS_WEATHER_WIN.get_height() * 8 // 16, SS_WEATHER_WIN.get_height() * 8 // 16)) icon2 = pygame.transform.smoothscale(icon2, (SS_WEATHER_WIN.get_height() * 8 // 16, SS_WEATHER_WIN.get_height() * 8 // 16)) fc_height = fc_height + fonts['std'].get_height() LCD.blit(icon, (SS_WEATHER_WIN.get_width() // 4 - icon.get_width() // 2, fc_height)) LCD.blit(icon2, (SS_WEATHER_WIN.get_width() * 3 // 4 - icon.get_width() // 2, fc_height)) fc_height = fc_height + icon.get_height() heute_text = self.heute_min + '/' + self.heute_max + '°C' draw_text(LCD, heute_text, fonts['std'], colors['weather_font'], align='centerx', pos=(-SS_WEATHER_WIN.get_width() // 4, fc_height)) morgen_text = 
self.morgen_min + '/' + self.morgen_max + '°C' draw_text(LCD, morgen_text, fonts['std'], colors['weather_font'], align='centerx', pos=(SS_WEATHER_WIN.get_width() // 4, fc_height)) pygame.display.update(SS_WEATHER_RECT)
[ "def", "show", "(", "self", ")", ":", "self", ".", "get_data", "(", ")", "SS_WEATHER_WIN", ".", "fill", "(", "colors", "[", "'weather_bg'", "]", ")", "if", "bg_buf", "[", "'weather_bg'", "]", ":", "SS_WEATHER_WIN", ".", "blit", "(", "bg_buf", "[", "'weather_bg'", "]", ",", "(", "0", ",", "0", ")", ",", "area", "=", "SS_WEATHER_RECT", ")", "fc_height", "=", "fonts", "[", "'big'", "]", ".", "get_height", "(", ")", "//", "4", "draw_text", "(", "LCD", ",", "'Wetter für ' ", " ", "elf.", "s", "tadt,", "", "fonts", "[", "'big'", "]", ",", "colors", "[", "'weather_font'", "]", ",", "align", "=", "'centerx'", ",", "pos", "=", "(", "0", ",", "fc_height", ")", ")", "fc_height", "=", "fonts", "[", "'big'", "]", ".", "get_height", "(", ")", "*", "5", "//", "4", "draw_text", "(", "LCD", ",", "'Jetzt: '", "+", "self", ".", "temperatur", "+", "'°C' ", " ", " / ' ", " ", "self", ".", "luftdruck", "+", "'mb'", "+", "' / '", "+", "self", ".", "luftfeuchte", "+", "'%rel.'", ",", "fonts", "[", "'big'", "]", ",", "colors", "[", "'weather_font'", "]", ",", "align", "=", "'centerx'", ",", "pos", "=", "(", "0", ",", "fc_height", ")", ")", "fc_height", "=", "fc_height", "+", "fonts", "[", "'big'", "]", ".", "get_height", "(", ")", "draw_text", "(", "LCD", ",", "'Heute'", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'weather_font'", "]", ",", "align", "=", "'centerx'", ",", "pos", "=", "(", "-", "SS_WEATHER_WIN", ".", "get_width", "(", ")", "//", "4", ",", "fc_height", ")", ")", "draw_text", "(", "LCD", ",", "'Morgen'", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'weather_font'", "]", ",", "align", "=", "'centerx'", ",", "pos", "=", "(", "SS_WEATHER_WIN", ".", "get_width", "(", ")", "//", "4", ",", "fc_height", ")", ")", "icon", "=", "os", ".", "path", ".", "join", "(", "WEATHERPATH", ",", "self", ".", "wetterlage", "+", "'.png'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "icon", ")", ":", "icon", "=", "os", ".", "path", ".", "join", "(", "WEATHERPATH", 
",", "'na.png'", ")", "icon2", "=", "os", ".", "path", ".", "join", "(", "WEATHERPATH", ",", "self", ".", "vorschau", "+", "'.png'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "icon2", ")", ":", "icon2", "=", "os", ".", "path", ".", "join", "(", "WEATHERPATH", ",", "'na.png'", ")", "icon", "=", "pygame", ".", "image", ".", "load", "(", "icon", ")", ".", "convert_alpha", "(", ")", "icon2", "=", "pygame", ".", "image", ".", "load", "(", "icon2", ")", ".", "convert_alpha", "(", ")", "icon", "=", "pygame", ".", "transform", ".", "smoothscale", "(", "icon", ",", "(", "SS_WEATHER_WIN", ".", "get_height", "(", ")", "*", "8", "//", "16", ",", "SS_WEATHER_WIN", ".", "get_height", "(", ")", "*", "8", "//", "16", ")", ")", "icon2", "=", "pygame", ".", "transform", ".", "smoothscale", "(", "icon2", ",", "(", "SS_WEATHER_WIN", ".", "get_height", "(", ")", "*", "8", "//", "16", ",", "SS_WEATHER_WIN", ".", "get_height", "(", ")", "*", "8", "//", "16", ")", ")", "fc_height", "=", "fc_height", "+", "fonts", "[", "'std'", "]", ".", "get_height", "(", ")", "LCD", ".", "blit", "(", "icon", ",", "(", "SS_WEATHER_WIN", ".", "get_width", "(", ")", "//", "4", "-", "icon", ".", "get_width", "(", ")", "//", "2", ",", "fc_height", ")", ")", "LCD", ".", "blit", "(", "icon2", ",", "(", "SS_WEATHER_WIN", ".", "get_width", "(", ")", "*", "3", "//", "4", "-", "icon", ".", "get_width", "(", ")", "//", "2", ",", "fc_height", ")", ")", "fc_height", "=", "fc_height", "+", "icon", ".", "get_height", "(", ")", "heute_text", "=", "self", ".", "heute_min", "+", "'/'", "+", "self", ".", "heute_max", "+", "'°C'", "draw_text", "(", "LCD", ",", "heute_text", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'weather_font'", "]", ",", "align", "=", "'centerx'", ",", "pos", "=", "(", "-", "SS_WEATHER_WIN", ".", "get_width", "(", ")", "//", "4", ",", "fc_height", ")", ")", "morgen_text", "=", "self", ".", "morgen_min", "+", "'/'", "+", "self", ".", "morgen_max", "+", "'°C'", "draw_text", "(", 
"LCD", ",", "morgen_text", ",", "fonts", "[", "'std'", "]", ",", "colors", "[", "'weather_font'", "]", ",", "align", "=", "'centerx'", ",", "pos", "=", "(", "SS_WEATHER_WIN", ".", "get_width", "(", ")", "//", "4", ",", "fc_height", ")", ")", "pygame", ".", "display", ".", "update", "(", "SS_WEATHER_RECT", ")" ]
[ 591, 4 ]
[ 634, 46 ]
null
python
de
['de', 'de', 'de']
True
true
null
ToolboxAgenda.get_shape_with_tag_item
(sld, tagKey)
return None
Shape auf Slide finden, das einen bestimmten TagKey enthaelt
Shape auf Slide finden, das einen bestimmten TagKey enthaelt
def get_shape_with_tag_item(sld, tagKey): ''' Shape auf Slide finden, das einen bestimmten TagKey enthaelt ''' for shp in sld.shapes: if shp.Tags.Item(tagKey) != "": return shp return None
[ "def", "get_shape_with_tag_item", "(", "sld", ",", "tagKey", ")", ":", "for", "shp", "in", "sld", ".", "shapes", ":", "if", "shp", ".", "Tags", ".", "Item", "(", "tagKey", ")", "!=", "\"\"", ":", "return", "shp", "return", "None" ]
[ 1247, 4 ]
[ 1252, 19 ]
null
python
de
['de', 'de', 'de']
True
true
null
XLSwriter.save
(self, fd=None)
return fd
Datei nach fd schreiben.
Datei nach fd schreiben.
def save(self, fd=None): """Datei nach fd schreiben.""" if not fd: fd = self.output assert fd self.book.save(fd) return fd
[ "def", "save", "(", "self", ",", "fd", "=", "None", ")", ":", "if", "not", "fd", ":", "fd", "=", "self", ".", "output", "assert", "fd", "self", ".", "book", ".", "save", "(", "fd", ")", "return", "fd" ]
[ 44, 4 ]
[ 50, 17 ]
null
python
de
['de', 'de', 'de']
True
true
null
ProgressBar.update
(self, value = 0)
return True
Aktualisiert den Fortschrittsbalken. Wenn value ausserhalb der Maximalen Grenzen liegt wird es auf den den Minimal bzw. Maximalwert begrenzt.
Aktualisiert den Fortschrittsbalken.
def update(self, value = 0): """ Aktualisiert den Fortschrittsbalken. Wenn value ausserhalb der Maximalen Grenzen liegt wird es auf den den Minimal bzw. Maximalwert begrenzt. """ if value < self.min: value = self.min if value > self.max: value = self.max if value == self.value: return False self.value = value # Ausrechnen wieviel Prozent erledigt sind percent_done = (float(value - self.min) / float(self.max - self.min)) * 100.0 percent_done = int(round(percent_done)) max_char = self.width - 2 num_hashes = int(round((percent_done / 100.0) * max_char)) if num_hashes == 0: self.progbar = "[>%s]" % (' '*(max_char-1)) elif num_hashes == max_char: self.progbar = "[%s]" % ('='*max_char) else: self.progbar = "[%s>%s]" % ('='*(num_hashes-1), ' '*(max_char-num_hashes)) # Prozentanzeige ungefaehr mittig einsetzen. percent_position = int((len(self.progbar) / 2) - len(str(percent_done))) percent_str = str(percent_done) + "%" self.progbar = ''.join([self.progbar[0:percent_position], percent_str, self.progbar[percent_position+len(percent_str):]]) return True
[ "def", "update", "(", "self", ",", "value", "=", "0", ")", ":", "if", "value", "<", "self", ".", "min", ":", "value", "=", "self", ".", "min", "if", "value", ">", "self", ".", "max", ":", "value", "=", "self", ".", "max", "if", "value", "==", "self", ".", "value", ":", "return", "False", "self", ".", "value", "=", "value", "# Ausrechnen wieviel Prozent erledigt sind", "percent_done", "=", "(", "float", "(", "value", "-", "self", ".", "min", ")", "/", "float", "(", "self", ".", "max", "-", "self", ".", "min", ")", ")", "*", "100.0", "percent_done", "=", "int", "(", "round", "(", "percent_done", ")", ")", "max_char", "=", "self", ".", "width", "-", "2", "num_hashes", "=", "int", "(", "round", "(", "(", "percent_done", "/", "100.0", ")", "*", "max_char", ")", ")", "if", "num_hashes", "==", "0", ":", "self", ".", "progbar", "=", "\"[>%s]\"", "%", "(", "' '", "*", "(", "max_char", "-", "1", ")", ")", "elif", "num_hashes", "==", "max_char", ":", "self", ".", "progbar", "=", "\"[%s]\"", "%", "(", "'='", "*", "max_char", ")", "else", ":", "self", ".", "progbar", "=", "\"[%s>%s]\"", "%", "(", "'='", "*", "(", "num_hashes", "-", "1", ")", ",", "' '", "*", "(", "max_char", "-", "num_hashes", ")", ")", "# Prozentanzeige ungefaehr mittig einsetzen.", "percent_position", "=", "int", "(", "(", "len", "(", "self", ".", "progbar", ")", "/", "2", ")", "-", "len", "(", "str", "(", "percent_done", ")", ")", ")", "percent_str", "=", "str", "(", "percent_done", ")", "+", "\"%\"", "self", ".", "progbar", "=", "''", ".", "join", "(", "[", "self", ".", "progbar", "[", "0", ":", "percent_position", "]", ",", "percent_str", ",", "self", ".", "progbar", "[", "percent_position", "+", "len", "(", "percent_str", ")", ":", "]", "]", ")", "return", "True" ]
[ 33, 4 ]
[ 66, 19 ]
null
python
de
['de', 'de', 'de']
True
true
null
suite
( testClass:None )
return suite
Fügt alle Funktionen, die mit test_ beginnen aus der angegeben Klasse der suite hinzu Parameters ---------- testClass : TYPE DESCRIPTION. Returns ------- suite : TYPE DESCRIPTION.
Fügt alle Funktionen, die mit test_ beginnen aus der angegeben Klasse der suite hinzu Parameters ---------- testClass : TYPE DESCRIPTION.
def suite( testClass:None ): '''Fügt alle Funktionen, die mit test_ beginnen aus der angegeben Klasse der suite hinzu Parameters ---------- testClass : TYPE DESCRIPTION. Returns ------- suite : TYPE DESCRIPTION. ''' if not testClass: testClass = WebAppTest suite = unittest.TestSuite( ) logger.setLevel( logging.WARNING ) # DEBUG WARNING if testClass: for m in dir( testClass ): if m.startswith('test_other_'): suite.addTest( testClass(m), ) pass elif m.startswith('test_mlc_'): suite.addTest( testClass(m), ) pass elif m.startswith('test_field_'): suite.addTest( testClass(m), ) pass return suite
[ "def", "suite", "(", "testClass", ":", "None", ")", ":", "if", "not", "testClass", ":", "testClass", "=", "WebAppTest", "suite", "=", "unittest", ".", "TestSuite", "(", ")", "logger", ".", "setLevel", "(", "logging", ".", "WARNING", ")", "# DEBUG WARNING ", "if", "testClass", ":", "for", "m", "in", "dir", "(", "testClass", ")", ":", "if", "m", ".", "startswith", "(", "'test_other_'", ")", ":", "suite", ".", "addTest", "(", "testClass", "(", "m", ")", ",", ")", "pass", "elif", "m", ".", "startswith", "(", "'test_mlc_'", ")", ":", "suite", ".", "addTest", "(", "testClass", "(", "m", ")", ",", ")", "pass", "elif", "m", ".", "startswith", "(", "'test_field_'", ")", ":", "suite", ".", "addTest", "(", "testClass", "(", "m", ")", ",", ")", "pass", "return", "suite" ]
[ 501, 0 ]
[ 536, 16 ]
null
python
de
['de', 'de', 'de']
True
true
null
DateiZugriff.lesen_alle
(self)
return daten
Zeilenweises einlesen der Daten und Rückgabe als Liste
Zeilenweises einlesen der Daten und Rückgabe als Liste
def lesen_alle(self): """ Zeilenweises einlesen der Daten und Rückgabe als Liste """ daten = [] with open(self.dateiname, "r") as f: for zeile in f: zeile = zeile.replace(",",".") # deutsches Komma in Austauschdatei zeile = zeile.rstrip("\n") daten = daten + [zeile] return daten
[ "def", "lesen_alle", "(", "self", ")", ":", "daten", "=", "[", "]", "with", "open", "(", "self", ".", "dateiname", ",", "\"r\"", ")", "as", "f", ":", "for", "zeile", "in", "f", ":", "zeile", "=", "zeile", ".", "replace", "(", "\",\"", ",", "\".\"", ")", "# deutsches Komma in Austauschdatei", "zeile", "=", "zeile", ".", "rstrip", "(", "\"\\n\"", ")", "daten", "=", "daten", "+", "[", "zeile", "]", "return", "daten" ]
[ 20, 4 ]
[ 28, 20 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispBase.__del__
(self)
Ausführung beim löschen der Instanz Alle vorhandenen figures entfernen
Ausführung beim löschen der Instanz Alle vorhandenen figures entfernen
def __del__(self): """Ausführung beim löschen der Instanz Alle vorhandenen figures entfernen """ for i in plt.get_fignums(): figure = plt.figure(i) axes = figure.get_axes() for ax in axes: figure.delaxes(ax) # alle Achsen entfernen try: figure.clf( ) # figures löschen plt.close( i ) # und entfernen except: # pragma: no cover pass
[ "def", "__del__", "(", "self", ")", ":", "for", "i", "in", "plt", ".", "get_fignums", "(", ")", ":", "figure", "=", "plt", ".", "figure", "(", "i", ")", "axes", "=", "figure", ".", "get_axes", "(", ")", "for", "ax", "in", "axes", ":", "figure", ".", "delaxes", "(", "ax", ")", "# alle Achsen entfernen", "try", ":", "figure", ".", "clf", "(", ")", "# figures löschen", "plt", ".", "close", "(", "i", ")", "# und entfernen", "except", ":", "# pragma: no cover", "pass" ]
[ 115, 4 ]
[ 129, 20 ]
null
python
de
['de', 'de', 'de']
True
true
null
ispDicom.archive_deleteSOPInstanceUID
(self, SOPInstanceUID)
return filename
Löscht ein Dataset aus dem File Archiv. Parameters ---------- SOPInstanceUID : TYPE Eine SOPInstanceUID. Returns ------- filename : str Der entfernte Dateiname
Löscht ein Dataset aus dem File Archiv.
def archive_deleteSOPInstanceUID(self, SOPInstanceUID): """Löscht ein Dataset aus dem File Archiv. Parameters ---------- SOPInstanceUID : TYPE Eine SOPInstanceUID. Returns ------- filename : str Der entfernte Dateiname """ exists, filename = self.archive_hasSOPInstanceUID( SOPInstanceUID ) if exists: os.remove( filename ) return filename
[ "def", "archive_deleteSOPInstanceUID", "(", "self", ",", "SOPInstanceUID", ")", ":", "exists", ",", "filename", "=", "self", ".", "archive_hasSOPInstanceUID", "(", "SOPInstanceUID", ")", "if", "exists", ":", "os", ".", "remove", "(", "filename", ")", "return", "filename" ]
[ 1103, 4 ]
[ 1123, 23 ]
null
python
de
['de', 'de', 'de']
True
true
null