code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
if self.session_cookie_secure: if not self.app.debug: self.app.config['SESSION_COOKIE_SECURE'] = True criteria = [ self.app.debug, flask.request.is_secure, flask.request.headers.get('X-Forwarded-Proto', 'http') == 'https', ] local_options = self._get_local_options() if local_options['force_https'] and not any(criteria): if flask.request.url.startswith('http://'): url = flask.request.url.replace('http://', 'https://', 1) code = 302 if self.force_https_permanent: code = 301 r = flask.redirect(url, code=code) return r
def _force_https(self)
Redirect any non-https requests to https. Based largely on flask-sslify.
2.770184
2.523136
1.097913
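For context, a minimal standalone sketch of the same redirect idea (assuming a plain Flask app; the hook name and the fixed 302 code here are illustrative, not the project's exact configuration):

import flask

app = flask.Flask(__name__)

@app.before_request
def _force_https():
    # Rewrite any plain-http request to https, as in the record above.
    if flask.request.url.startswith('http://') and not app.debug:
        url = flask.request.url.replace('http://', 'https://', 1)
        return flask.redirect(url, code=302)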
options = self._get_local_options() self._set_feature_headers(response.headers, options) self._set_frame_options_headers(response.headers, options) self._set_content_security_policy_headers(response.headers, options) self._set_hsts_headers(response.headers) self._set_referrer_policy_headers(response.headers) return response
def _set_response_headers(self, response)
Applies all configured headers to the given response.
2.82192
2.618477
1.077695
twohand = [] for item in TWOHANDED_WEAPONS: twohand.append([item]) onehand = [] for item in ONEHANDED_WEAPONS: onehand.append([item]) shield = SHIELDS dualwield_variations = [] weapon_shield_variations = [] for item in ONEHANDED_WEAPONS: for i in ONEHANDED_WEAPONS: dualwield_variations.append([item, i]) for j in shield: weapon_shield_variations.append([j, item]) return twohand + onehand + dualwield_variations + weapon_shield_variations
def init_weapon_list()
Initialize the possible weapon combinations.
2.863576
2.876279
0.995583
# When using smaller hash algorithms like MD5 or SHA1, # the number of bits does not provide enough information # to generate unique colors. Instead the hash is internally # appended to itself to fit the MINIMUM_HASH_LEN. # This leads to smaller hashes displaying less color # variations, depicting the insecurity of small hashes. while (len(hashcode) < MINIMUM_HASH_LEN): chardiff = diff(len(hashcode), MINIMUM_HASH_LEN) if DEBUG: print ("Hashcode: %r with length: %d is too small. Appending difference." % (hashcode, len(hashcode))) hashcode += hashcode[:chardiff] if DEBUG: print ("Hash is now: %r with length: %d" % (hashcode, len(hashcode))) hashparts = split_sequence(hashcode, HEX_COLOR_LEN) colors = [] for i in range(COLOR_QUANTITY): colors.append(hex2rgb(hashparts[i])) if DEBUG: print ("Generated colors: %r" % colors) return colors
def grind_hash_for_colors(hashcode)
Extracts information from the hashcode to generate different colors. Returns a list of colors as (r, g, b) tuples.
6.451425
6.355663
1.015067
tokens = [] while seq: tokens.append(seq[:n]) seq = seq[n:] return tokens
def split_sequence(seq, n)
Generates tokens of length n from a sequence. The last token may be of smaller length.
2.991095
2.466327
1.212773
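A short usage sketch of the chunking helper above (expected output shown in a comment):

def split_sequence(seq, n):
    # Slice the sequence into consecutive chunks of length n;
    # the final chunk may be shorter if len(seq) is not a multiple of n.
    tokens = []
    while seq:
        tokens.append(seq[:n])
        seq = seq[n:]
    return tokens

print(split_sequence("a1b2c3d", 2))  # ['a1', 'b2', 'c3', 'd']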
weaponlist = init_weapon_list() # The second six characters of the hash # control the weapon decision. weapon_control = hashcode[ASPECT_CONTROL_LEN:(ASPECT_CONTROL_LEN * 2)] # Decimal value of the hash chunk to map. hash_dec_value = int(weapon_control, HEX_BASE) decision = map_decision(MAX_DECISION_VALUE, len(weaponlist), hash_dec_value) return choose_weapon(decision, weaponlist)
def grind_hash_for_weapon(hashcode)
Grinds the given hashcode for a weapon to draw on the pixelmap. Utilizes the second six characters from the hashcode.
9.888868
9.334497
1.059389
choice = [] for i in range(len(weapons)): if (i < decision): choice = weapons[i] return choice
def choose_weapon(decision, weapons)
Chooses a weapon from a given list based on the decision.
4.124939
3.352498
1.230408
choice = [] for i in range(len(ASPECTSTYLES)): if (i < decision): choice = ASPECTSTYLES[i] return choice
def choose_aspect(decision)
Chooses a style from ASPECTSTYLES based on the decision.
5.880981
3.156437
1.863171
# Make sure a possible '#' char is eliminated # before processing the color. if ('#' in hexvalue): hexcolor = hexvalue.replace('#', '') else: hexcolor = hexvalue # Hex colors have a fixed length of 6 characters excluding the '#' # TODO: Include custom exception here, even if it should never happen. if (len(hexcolor) != 6): print ("Unexpected length of hex color value.\nSix characters excluding \'#\' expected.") return 0 # Convert each two characters of # the hex to an RGB color value. r = int(hexcolor[0:2], HEX_BASE) g = int(hexcolor[2:4], HEX_BASE) b = int(hexcolor[4:6], HEX_BASE) return r, g, b
def hex2rgb(hexvalue)
Converts a given hex color to its respective rgb color.
5.025384
4.980508
1.00901
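A minimal standalone sketch of the hex-to-RGB conversion described above (HEX_BASE is assumed to be 16; the error path is simplified to an exception):

def hex2rgb(hexvalue):
    # Strip an optional leading '#' and convert each two-character
    # pair of the remaining six hex digits to an integer channel value.
    hexcolor = hexvalue.lstrip('#')
    if len(hexcolor) != 6:
        raise ValueError("expected a 6-character hex color")
    return tuple(int(hexcolor[i:i + 2], 16) for i in (0, 2, 4))

print(hex2rgb("#ff8000"))  # (255, 128, 0)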
default = ["pagan", "python", "avatar", "github"] slogan = request.forms.get("slogan") if not slogan: if request.get_cookie("hist1"): slogan = request.get_cookie("hist1") else: slogan = "pagan" if not request.get_cookie("hist1"): hist1, hist2, hist3, hist4 = default[:] else: hist1 = request.get_cookie("hist1") hist2 = request.get_cookie("hist2") hist3 = request.get_cookie("hist3") hist4 = request.get_cookie("hist4") if slogan in (hist1, hist2, hist3, hist4): history = [hist1, hist2, hist3, hist4] history.remove(slogan) hist1, hist2, hist3 = history[0], history[1], history[2] response.set_cookie("hist1", slogan, max_age=60*60*24*30, httponly=True) response.set_cookie("hist2", hist1, max_age=60*60*24*30, httponly=True) response.set_cookie("hist3", hist2, max_age=60*60*24*30, httponly=True) response.set_cookie("hist4", hist3, max_age=60*60*24*30, httponly=True) # slogan, hist1, hist2, hist3 = escape(slogan), escape(hist1),\ # escape(hist2), escape(hist3) md5 = hashlib.md5() md5.update(slogan) slogan_hash = md5.hexdigest() md5.update(hist1) hist1_hash = md5.hexdigest() md5.update(hist2) hist2_hash = md5.hexdigest() md5.update(hist3) hist3_hash = md5.hexdigest() return template(TEMPLATEINDEX, slogan=slogan, hist1=hist1, hist2=hist2, hist3=hist3, sloganHash=slogan_hash, hist1Hash=hist1_hash, hist2Hash=hist2_hash, hist3Hash=hist3_hash)
def index()
Main functionality of the webserver.
1.796283
1.800985
0.99739
tmpf = tempfile.mkstemp(".png")[1] image = pagan.Avatar("") image.img = pagan.generator.generate_by_hash(hashvalue) image.save("/", tmpf) return static_file(tmpf, root="/")
def hashimage(hashvalue)
Generate image by hash; uses a tempfile :-/
9.174952
7.604202
1.206563
if hashfun not in generator.HASHES.keys(): print ("Unknown or unsupported hash function. Using default: %s" % self.DEFAULT_HASHFUN) algo = self.DEFAULT_HASHFUN else: algo = hashfun return generator.generate(inpt, algo)
def __create_image(self, inpt, hashfun)
Creates the avatar based on the input and the chosen hash function.
5.161932
5.030027
1.026224
self.img = self.__create_image(inpt, hashfun)
def change(self, inpt, hashfun=DEFAULT_HASHFUN)
Change the avatar by providing a new input. Uses the default hash function if none is given.
10.363965
7.270896
1.425404
# Creates the path when it does not exist. if not os.path.exists(path): os.makedirs(path) # Cut the .png file ending if one was included. if filename[-4:] == ".png": filename = filename[:-4] # Saves the image under the given filepath. filepath = os.path.join(path, "%s.png" % filename) print ("Saving: %s" % filepath) self.img.save(filepath, 'PNG')
def save(self, path=DEFAULT_OUTPUT_PATH, filename=DEFAULT_FILENAME)
Saves an avatar under the given output path with the given filename. The file ending ".png" is appended automatically. If the path does not exist, it will be created. When parameters are omitted, a default path and/or filename will be used.
5.447271
4.921659
1.106796
if (algo == HASH_MD5): hashcode = hashlib.md5() elif (algo == HASH_SHA1): hashcode = hashlib.sha1() elif (algo == HASH_SHA224): hashcode = hashlib.sha224() elif (algo == HASH_SHA256): hashcode = hashlib.sha256() elif (algo == HASH_SHA384): hashcode = hashlib.sha384() elif (algo == HASH_SHA512): hashcode = hashlib.sha512() if sys.version_info.major == 2: inpt = bytes(inpt) else: inpt = bytes(inpt, "utf-8") hashcode.update(inpt) hexhash = hashcode.hexdigest() return hexhash
def hash_input(inpt, algo=HASH_SHA256)
Generates a hash from a given String with a specified algorithm and returns the hash in hexadecimal form. Default: sha256.
1.51623
1.535737
0.987298
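For illustration, an equivalent standalone sketch using hashlib.new() with the standard algorithm names (the project's HASH_* constants are not reproduced here):

import hashlib

def hash_input(inpt, algo="sha256"):
    # Build the requested hash object, feed it the UTF-8 encoded input
    # and return the hexadecimal digest, as in the record above.
    h = hashlib.new(algo)
    h.update(inpt.encode("utf-8"))
    return h.hexdigest()

print(hash_input("pagan"))         # 64-character sha256 digest
print(hash_input("pagan", "md5"))  # 32-character md5 digest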
'''Reads the SHIELD_DECO.pgn file and creates the shield decal layer.''' layer = [] if weapons[0] in hashgrinder.SHIELDS: layer = pgnreader.parse_pagan_file(FILE_SHIELD_DECO, ip, invert=False, sym=False) return layer
def create_shield_deco_layer(weapons, ip)
Reads the SHIELD_DECO.pgn file and creates the shield decal layer.
12.607875
7.986866
1.578576
'''Reads the HAIR.pgn file and creates the hair layer.''' layer = [] if 'HAIR' in aspect: layer = pgnreader.parse_pagan_file(FILE_HAIR, ip, invert=False, sym=True) return layer
def create_hair_layer(aspect, ip)
Reads the HAIR.pgn file and creates the hair layer.
13.270682
7.913342
1.677001
'''Reads the TORSO.pgn file and creates the torso layer.''' layer = [] if 'TOP' in aspect: layer = pgnreader.parse_pagan_file(FILE_TORSO, ip, invert=False, sym=True) return layer
def create_torso_layer(aspect, ip)
Reads the TORSO.pgn file and creates the torso layer.
13.267867
8.016634
1.655042
'''Reads the SUBFIELD.pgn file and creates the subfield layer.''' layer = [] if 'PANTS' in aspect: layer = pgnreader.parse_pagan_file(FILE_SUBFIELD, ip, invert=False, sym=True) else: layer = pgnreader.parse_pagan_file(FILE_MIN_SUBFIELD, ip, invert=False, sym=True) return layer
def create_subfield_layer(aspect, ip)
Reads the SUBFIELD.pgn file and creates the subfield layer.
7.345858
5.378841
1.365695
'''Reads the BOOTS.pgn file and creates the boots layer.''' layer = [] if 'BOOTS' in aspect: layer = pgnreader.parse_pagan_file(FILE_BOOTS, ip, invert=False, sym=True) return layer
def create_boots_layer(aspect, ip)
Reads the BOOTS.pgn file and creates the boots layer.
11.772884
7.216072
1.631481
return pgnreader.parse_pagan_file(('%s%spgn%s' % (PACKAGE_DIR, os.sep, os.sep)) + shield + '.pgn', hashcode, sym=False, invert=False)
def create_shield_layer(shield, hashcode)
Creates the layer for shields.
13.026004
12.557464
1.037312
return pgnreader.parse_pagan_file(('%s%spgn%s' % (PACKAGE_DIR, os.sep, os.sep)) + weapon + '.pgn', hashcode, sym=False, invert=isSecond)
def create_weapon_layer(weapon, hashcode, isSecond=False)
Creates the layer for weapons.
13.40322
12.350001
1.085281
pixelmap = [] # Scaling the pixel offsets. for pix_x in range(MAX_X + 1): for pix_y in range(MAX_Y + 1): # Horizontal pixels y1 = pix_y * dotsize[0] x1 = pix_x * dotsize[1] # Vertical pixels y2 = pix_y * dotsize[0] + (dotsize[0] - 1) x2 = pix_x * dotsize[1] + (dotsize[1] - 1) if (y1 <= MAX_Y) and (y2 <= MAX_Y): if (x1 <= MAX_X) and (x2 <= MAX_X): if (pix_x, pix_y) in layer: pixelmap.append([(y1, x1), (y2, x2), color]) return pixelmap
def scale_pixels(color, layer)
Scales the pixels to the virtual pixelmap.
2.531988
2.472088
1.024231
'''Draws the image based on the given pixelmap.''' for item in pixelmap: color = item[2] # Access the rectangle edges. pixelbox = (item[0][0], item[0][1], item[1][0], item[1][1]) draw = ImageDraw.Draw(img) draw.rectangle(pixelbox, fill=color)
def draw_image(pixelmap, img)
Draws the image based on the given pixelmap.
4.159205
4.101377
1.0141
# Color distribution. # colors = hashgrinder.grindIpForColors(ip) colors = hashgrinder.grind_hash_for_colors(hashcode) color_body = colors[0] color_subfield = colors[1] color_weapon_a = colors[2] color_weapon_b = colors[3] color_shield_deco = colors[4] color_boots = colors[5] color_hair = colors[6] color_top = colors[7] # Grinds for the aspect. aspect = hashgrinder.grind_hash_for_aspect(hashcode) #Determine weapons of the avatar. weapons = hashgrinder.grind_hash_for_weapon(hashcode) if DEBUG: print ("Current aspect: %r" % aspect) print ("Current weapons: %r" % weapons) # There is just one body template. The optional pixels need to be mirrored so # the body layout will be symmetric to avoid uncanny looks. layer_body = pgnreader.parse_pagan_file(FILE_BODY, hashcode, invert=False, sym=True) layer_hair = create_hair_layer(aspect, hashcode) layer_boots = create_boots_layer(aspect, hashcode) layer_torso = create_torso_layer(aspect, hashcode) has_shield = (weapons[0] in hashgrinder.SHIELDS) if has_shield: layer_weapon_a = create_shield_layer(weapons[0], hashcode) layer_weapon_b = create_weapon_layer(weapons[1], hashcode) else: layer_weapon_a = create_weapon_layer(weapons[0], hashcode) if (len(weapons) == 2): layer_weapon_b = create_weapon_layer(weapons[1], hashcode, True) layer_subfield = create_subfield_layer(aspect, hashcode) layer_deco = create_shield_deco_layer(weapons, hashcode) pixelmap = scale_pixels(color_body, layer_body) pixelmap += scale_pixels(color_top, layer_torso) pixelmap += scale_pixels(color_hair, layer_hair) pixelmap += scale_pixels(color_subfield, layer_subfield) pixelmap += scale_pixels(color_boots, layer_boots) pixelmap += scale_pixels(color_weapon_a, layer_weapon_a) if (len(weapons) == 2): pixelmap += scale_pixels(color_weapon_b, layer_weapon_b) pixelmap += scale_pixels(color_shield_deco, layer_deco) return pixelmap
def setup_pixelmap(hashcode)
Creates and combines all required layers to build a pixelmap for creating the virtual pixels.
3.017919
3.02996
0.996026
img = Image.new(IMAGE_MODE, IMAGE_SIZE, BACKGROUND_COLOR) hashcode = hash_input(str, alg) pixelmap = setup_pixelmap(hashcode) draw_image(pixelmap, img) return img
def generate(str, alg)
Generates a PIL image avatar based on the given input string. Acts as the main accessor to pagan.
5.590943
5.326472
1.049652
img = Image.new(IMAGE_MODE, IMAGE_SIZE, BACKGROUND_COLOR) if len(hashcode) < 32: print ("hashcode must have length >= 32, %s" % hashcode) raise FalseHashError allowed = "0123456789abcdef" hashcheck = [c in allowed for c in hashcode] if False in hashcheck: print ("hashcode contains disallowed characters: %s" % hashcode) raise FalseHashError pixelmap = setup_pixelmap(hashcode) draw_image(pixelmap, img) return img
def generate_by_hash(hashcode)
Generates a PIL image avatar based on the given hash string. Acts as the main accessor to pagan.
4.315529
4.278991
1.008539
'''Enforces vertical symmetry of the pixelmap. Returns a pixelmap with all pixels mirrored in the middle. The initial ones still remain.''' mirror = [] for item in pixmap: y = item[0] x = item[1] if x <= IMAGE_APEX: diff_x = diff(x, IMAGE_APEX) mirror.append((y, x + (2 * diff_x) - 1)) if x > IMAGE_APEX: diff_x = diff(x, IMAGE_APEX) mirror.append((y, x - (2 * diff_x) - 1)) return mirror + pixmap
def enforce_vertical_symmetry(pixmap)
Enforces vertical symmetry of the pixelmap. Returns a pixelmap with all pixels mirrored in the middle. The initial ones still remain.
4.267814
2.781479
1.534368
if not hasattr(replace_types, '_replacer'): replace_types._replacer = build_replacer() return replace_types._replacer(text)
def replace_types(text)
Chomp down company types to a more conventional form.
3.339897
3.427218
0.974521
# transliterate to ascii text = ascii_text(text) # replace punctuation and symbols text = CHARACTERS_REMOVE_RE.sub('', text) text = category_replace(text) # pad out for company type replacements text = ''.join((boundary, collapse_spaces(text), boundary)) return text
def clean_strict(text, boundary=WS)
Super-hardcore string scrubbing.
10.783421
10.857344
0.993191
msg = [] for tool_name, check_cli in collections.OrderedDict(tools).items(): try: subprocess.check_output(check_cli, shell=True, stderr=subprocess.STDOUT) except subprocess.CalledProcessError: msg.append('%r not found or not usable.' % tool_name) return '\n'.join(msg) if msg else 'Your system is ready.'
def selfcheck(tools)
Audit the system for issues. :param tools: Tools description. Use elevation.TOOLS to test elevation.
3.723746
4.206257
0.885287
datasource_root, spec = ensure_setup(cache_dir, product) ensure_tiles_names = list(spec['tile_names'](*bounds)) # FIXME: emergency hack to enforce the no-bulk-download policy if len(ensure_tiles_names) > max_download_tiles: raise RuntimeError("Too many tiles: %d. Please consult the providers' websites " "for how to bulk download tiles." % len(ensure_tiles_names)) with util.lock_tiles(datasource_root, ensure_tiles_names): ensure_tiles(datasource_root, ensure_tiles_names, **kwargs) with util.lock_vrt(datasource_root, product): util.check_call_make(datasource_root, targets=['all']) return datasource_root
def seed(cache_dir=CACHE_DIR, product=DEFAULT_PRODUCT, bounds=None, max_download_tiles=9, **kwargs)
Seed the DEM to given bounds. :param cache_dir: Root of the DEM cache folder. :param product: DEM product choice. :param bounds: Output bounds in 'left bottom right top' order. :param max_download_tiles: Maximum number of tiles to process. :param kwargs: Pass additional kwargs to ensure_tiles.
7.386317
7.89927
0.935063
bounds = build_bounds(bounds, margin=margin) datasource_root = seed(bounds=bounds, **kwargs) do_clip(datasource_root, bounds, output, **kwargs)
def clip(bounds, output=DEFAULT_OUTPUT, margin=MARGIN, **kwargs)
Clip the DEM to given bounds. :param bounds: Output bounds in 'left bottom right top' order. :param output: Path to output file. Existing files will be overwritten. :param margin: Decimal degree margin added to the bounds. Use '%' for percent margin. :param cache_dir: Root of the DEM cache folder. :param product: DEM product choice.
7.741256
12.286968
0.630038
datasource_root, _ = ensure_setup(cache_dir, product) util.check_call_make(datasource_root, targets=['info'])
def info(cache_dir=CACHE_DIR, product=DEFAULT_PRODUCT)
Show info about the product cache. :param cache_dir: Root of the DEM cache folder. :param product: DEM product choice.
18.888338
29.220396
0.646409
''' Return the consumer and oauth tokens with three-legged OAuth process and save in a yaml file in the user's home directory. ''' print('Retrieve consumer key and consumer secret from http://www.tumblr.com/oauth/apps') consumer_key = input('Paste the consumer key here: ').strip() consumer_secret = input('Paste the consumer secret here: ').strip() request_token_url = 'http://www.tumblr.com/oauth/request_token' authorize_url = 'http://www.tumblr.com/oauth/authorize' access_token_url = 'http://www.tumblr.com/oauth/access_token' # STEP 1: Obtain request token oauth_session = OAuth1Session(consumer_key, client_secret=consumer_secret) fetch_response = oauth_session.fetch_request_token(request_token_url) resource_owner_key = fetch_response.get('oauth_token') resource_owner_secret = fetch_response.get('oauth_token_secret') # STEP 2: Authorize URL + Rresponse full_authorize_url = oauth_session.authorization_url(authorize_url) # Redirect to authentication page print('\nPlease go here and authorize:\n{}'.format(full_authorize_url)) redirect_response = input('Allow then paste the full redirect URL here:\n').strip() # Retrieve oauth verifier oauth_response = oauth_session.parse_authorization_response(redirect_response) verifier = oauth_response.get('oauth_verifier') # STEP 3: Request final access token oauth_session = OAuth1Session( consumer_key, client_secret=consumer_secret, resource_owner_key=resource_owner_key, resource_owner_secret=resource_owner_secret, verifier=verifier ) oauth_tokens = oauth_session.fetch_access_token(access_token_url) tokens = { 'consumer_key': consumer_key, 'consumer_secret': consumer_secret, 'oauth_token': oauth_tokens.get('oauth_token'), 'oauth_token_secret': oauth_tokens.get('oauth_token_secret') } yaml_file = open(yaml_path, 'w+') yaml.dump(tokens, yaml_file, indent=2) yaml_file.close() return tokens
def new_oauth(yaml_path)
Return the consumer and oauth tokens with three-legged OAuth process and save in a yaml file in the user's home directory.
2.000445
1.775103
1.126946
#crazy little if statement hanging by himself :( if not params: return #We only allow one version of the data parameter to be passed data_filter = ['data', 'source', 'external_url', 'embed'] multiple_data = [key for key in params.keys() if key in data_filter] if len(multiple_data) > 1: raise Exception("You can't mix and match data parameters") #No bad fields which are not in valid options can pass disallowed_fields = [key for key in params.keys() if key not in valid_options] if disallowed_fields: field_strings = ",".join(disallowed_fields) raise Exception("{} are not allowed fields".format(field_strings))
def validate_params(valid_options, params)
Helps us validate the parameters for the request :param valid_options: a list of strings of valid options for the api request :param params: a dict, the key-value store of request parameters; we really only care about the keys, which tell us what the user is passing in the API request :returns: None, or throws an exception if the validation fails
5.890801
5.875506
1.002603
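A compact, runnable restatement of the validation rules above (the option names passed in are examples, not the full Tumblr set):

def validate_params(valid_options, params):
    data_filter = ['data', 'source', 'external_url', 'embed']
    # At most one data-carrying parameter may be supplied.
    if sum(1 for key in params if key in data_filter) > 1:
        raise Exception("You can't mix and match data parameters")
    # Every remaining key must be in the whitelist of valid options.
    bad = [key for key in params if key not in valid_options]
    if bad:
        raise Exception("{} are not allowed fields".format(",".join(bad)))

validate_params(['caption', 'data', 'source'], {'caption': 'hi', 'data': 'pic.png'})  # passes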
@wraps(fn) def add_dot_tumblr(*args, **kwargs): if (len(args) > 1 and ("." not in args[1])): args = list(args) args[1] += ".tumblr.com" return fn(*args, **kwargs) return add_dot_tumblr
def validate_blogname(fn)
Decorator to validate the blogname and let you pass in a blogname like: client.blog_info('codingjester') or client.blog_info('codingjester.tumblr.com') or client.blog_info('blog.johnbunting.me') and query all the same blog.
3.176415
2.927154
1.085155
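An illustrative, self-contained re-creation of the decorator above, showing how a bare blogname gets ".tumblr.com" appended while a full domain is left alone (the Demo class is purely hypothetical):

from functools import wraps

def validate_blogname(fn):
    @wraps(fn)
    def add_dot_tumblr(*args, **kwargs):
        # args[0] is self, args[1] is the blogname.
        if len(args) > 1 and "." not in args[1]:
            args = list(args)
            args[1] += ".tumblr.com"
        return fn(*args, **kwargs)
    return add_dot_tumblr

class Demo:
    @validate_blogname
    def blog_info(self, blogname):
        return blogname

print(Demo().blog_info("codingjester"))         # codingjester.tumblr.com
print(Demo().blog_info("blog.johnbunting.me"))  # blog.johnbunting.me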
url = self.host + url if params: url = url + "?" + urllib.parse.urlencode(params) try: resp = requests.get(url, allow_redirects=False, headers=self.headers, auth=self.oauth) except TooManyRedirects as e: resp = e.response return self.json_parse(resp)
def get(self, url, params)
Issues a GET request against the API, properly formatting the params :param url: a string, the url you are requesting :param params: a dict, the key-value of all the parameters needed in the request :returns: a dict parsed from the JSON response
3.055014
3.580649
0.853201
url = self.host + url try: if files: return self.post_multipart(url, params, files) else: data = urllib.parse.urlencode(params) if not PY3: data = str(data) resp = requests.post(url, data=data, headers=self.headers, auth=self.oauth) return self.json_parse(resp) except HTTPError as e: return self.json_parse(e.response)
def post(self, url, params={}, files=[])
Issues a POST request against the API, allows for multipart data uploads :param url: a string, the url you are requesting :param params: a dict, the key-value of all the parameters needed in the request :param files: a list, the list of tuples of files :returns: a dict parsed from the JSON response
2.674542
3.010329
0.888455
try: data = response.json() except ValueError: data = {'meta': { 'status': 500, 'msg': 'Server Error'}, 'response': {"error": "Malformed JSON or HTML was returned."}} # We only really care about the response if we succeed # and the error if we fail if 200 <= data['meta']['status'] <= 399: return data['response'] else: return data
def json_parse(self, response)
Wraps and abstracts response validation and JSON parsing to make sure the user gets the correct response. :param response: The response returned to us from the request :returns: a dict of the json response
4.362051
4.725635
0.923061
resp = requests.post( url, data=params, params=params, files=files, headers=self.headers, allow_redirects=False, auth=self.oauth ) return self.json_parse(resp)
def post_multipart(self, url, params, files)
Generates and issues a multipart request for data files :param url: a string, the url you are requesting :param params: a dict, a key-value of all the parameters :param files: a dict, matching the form '{name: file descriptor}' :returns: a dict parsed from the JSON response
3.30573
4.097426
0.806782
url = "/v2/blog/{}/avatar/{}".format(blogname, size) return self.send_api_request("get", url)
def avatar(self, blogname, size=64)
Retrieves the url of the blog's avatar :param blogname: a string, the blog you want the avatar for :returns: A dict created from the JSON response
4.943819
7.078164
0.698461
kwargs.update({'tag': tag}) return self.send_api_request("get", '/v2/tagged', kwargs, ['before', 'limit', 'filter', 'tag', 'api_key'], True)
def tagged(self, tag, **kwargs)
Gets a list of posts tagged with the given tag :param tag: a string, the tag you want to look for :param before: a unix timestamp, the timestamp you want to start at to look at posts. :param limit: the number of results you want :param filter: the post format that you want returned: html, text, raw client.tagged("gif", limit=10) :returns: a dict created from the JSON response
8.797342
9.265485
0.949475
if type is None: url = '/v2/blog/{}/posts'.format(blogname) else: url = '/v2/blog/{}/posts/{}'.format(blogname, type) return self.send_api_request("get", url, kwargs, ['id', 'tag', 'limit', 'offset', 'before', 'reblog_info', 'notes_info', 'filter', 'api_key'], True)
def posts(self, blogname, type=None, **kwargs)
Gets a list of posts from a particular blog :param blogname: a string, the blogname you want to look up posts for. eg: codingjester.tumblr.com :param id: an int, the id of the post you are looking for on the blog :param tag: a string, the tag you are looking for on posts :param limit: an int, the number of results you want :param offset: an int, the offset of the posts you want to start at. :param before: an int, the timestamp for posts you want before. :param filter: the post format you want returned: HTML, text or raw. :param type: the type of posts you want returned, e.g. video. If omitted returns all post types. :returns: a dict created from the JSON response
3.995588
3.905141
1.023161
url = "/v2/blog/{}/info".format(blogname) return self.send_api_request("get", url, {}, ['api_key'], True)
def blog_info(self, blogname)
Gets the information of the given blog :param blogname: the name of the blog you want information on. eg: codingjester.tumblr.com :returns: a dict created from the JSON response of information
7.388087
9.254462
0.798327
url = "/v2/blog/{}/following".format(blogname) return self.send_api_request("get", url, kwargs, ['limit', 'offset'])
def blog_following(self, blogname, **kwargs)
Gets the publicly exposed list of blogs that a blog follows :param blogname: the name of the blog you want to get information on. eg: codingjester.tumblr.com :param limit: an int, the number of blogs you want returned :param offset: an int, the blog to start at, for pagination. # Start at the 20th blog and get 20 more blogs. client.blog_following('pytblr', offset=20, limit=20) :returns: a dict created from the JSON response
5.335295
7.289093
0.731956
url = "/v2/blog/{}/followers".format(blogname) return self.send_api_request("get", url, kwargs, ['limit', 'offset'])
def followers(self, blogname, **kwargs)
Gets the followers of the given blog :param limit: an int, the number of followers you want returned :param offset: an int, the follower to start at, for pagination. # Start at the 20th follower and get 20 more followers. client.followers('codingjester', offset=20, limit=20) :returns: A dict created from the JSON response
5.375157
7.451334
0.721368
url = "/v2/blog/{}/likes".format(blogname) return self.send_api_request("get", url, kwargs, ['limit', 'offset', 'before', 'after'], True)
def blog_likes(self, blogname, **kwargs)
Gets the given blog's likes :param limit: an int, the number of likes you want returned (DEPRECATED) :param offset: an int, the like you want to start at, for pagination. :param before: an int, the timestamp for likes you want before. :param after: an int, the timestamp for likes you want after. # Start at the 20th like and get 20 more likes. client.blog_likes('codingjester', offset=20, limit=20) :returns: A dict created from the JSON response
5.208963
5.925119
0.879132
url = "/v2/blog/{}/posts/queue".format(blogname) return self.send_api_request("get", url, kwargs, ['limit', 'offset', 'filter'])
def queue(self, blogname, **kwargs)
Gets posts that are currently in the blog's queue :param limit: an int, the number of posts you want returned :param offset: an int, the post you want to start at, for pagination. :param filter: the post format that you want returned: HTML, text, raw. :returns: a dict created from the JSON response
6.034049
6.266924
0.962841
url = "/v2/blog/{}/posts/draft".format(blogname) return self.send_api_request("get", url, kwargs, ['filter'])
def drafts(self, blogname, **kwargs)
Gets posts that are currently in the blog's drafts :param filter: the post format that you want returned: HTML, text, raw. :returns: a dict created from the JSON response
6.911986
8.842052
0.781717
url = "/v2/blog/{}/posts/submission".format(blogname) return self.send_api_request("get", url, kwargs, ["offset", "filter"])
def submission(self, blogname, **kwargs)
Gets posts that are currently in the blog's submissions :param offset: an int, the post you want to start at, for pagination. :param filter: the post format that you want returned: HTML, text, raw. :returns: a dict created from the JSON response
7.333881
7.068737
1.037509
url = "/v2/user/like" params = {'id': id, 'reblog_key': reblog_key} return self.send_api_request("post", url, params, ['id', 'reblog_key'])
def like(self, id, reblog_key)
Like the post of the given blog :param id: an int, the id of the post you want to like :param reblog_key: a string, the reblog key of the post :returns: a dict created from the JSON response
3.358149
4.158069
0.807622
url = "/v2/user/unlike" params = {'id': id, 'reblog_key': reblog_key} return self.send_api_request("post", url, params, ['id', 'reblog_key'])
def unlike(self, id, reblog_key)
Unlike the post of the given blog :param id: an int, the id of the post you want to unlike :param reblog_key: a string, the reblog key of the post :returns: a dict created from the JSON response
3.275285
4.096602
0.799513
kwargs.update({"type": "photo"}) return self._send_post(blogname, kwargs)
def create_photo(self, blogname, **kwargs)
Create a photo post or photoset on a blog :param blogname: a string, the url of the blog you want to post to. :param state: a string, The state of the post. :param tags: a list of tags that you want applied to the post :param tweet: a string, the customized tweet that you want :param date: a string, the GMT date and time of the post :param format: a string, sets the format type of the post. html or markdown :param slug: a string, a short text summary to the end of the post url :param caption: a string, the caption that you want applied to the photo :param link: a string, the 'click-through' url you want on the photo :param source: a string, the photo source url :param data: a string or a list of the path of photo(s) :returns: a dict created from the JSON response
5.67234
13.409504
0.423009
kwargs.update({"type": "text"}) return self._send_post(blogname, kwargs)
def create_text(self, blogname, **kwargs)
Create a text post on a blog :param blogname: a string, the url of the blog you want to post to. :param state: a string, The state of the post. :param tags: a list of tags that you want applied to the post :param tweet: a string, the customized tweet that you want :param date: a string, the GMT date and time of the post :param format: a string, sets the format type of the post. html or markdown :param slug: a string, a short text summary to the end of the post url :param title: a string, the optional title of a post :param body: a string, the body of the text post :returns: a dict created from the JSON response
5.536932
12.760626
0.433908
kwargs.update({"type": "quote"}) return self._send_post(blogname, kwargs)
def create_quote(self, blogname, **kwargs)
Create a quote post on a blog :param blogname: a string, the url of the blog you want to post to. :param state: a string, The state of the post. :param tags: a list of tags that you want applied to the post :param tweet: a string, the customized tweet that you want :param date: a string, the GMT date and time of the post :param format: a string, sets the format type of the post. html or markdown :param slug: a string, a short text summary to the end of the post url :param quote: a string, the full text of the quote :param source: a string, the cited source of the quote :returns: a dict created from the JSON response
5.595792
13.808288
0.405249
kwargs.update({"type": "link"}) return self._send_post(blogname, kwargs)
def create_link(self, blogname, **kwargs)
Create a link post on a blog :param blogname: a string, the url of the blog you want to post to. :param state: a string, The state of the post. :param tags: a list of tags that you want applied to the post :param tweet: a string, the customized tweet that you want :param date: a string, the GMT date and time of the post :param format: a string, sets the format type of the post. html or markdown :param slug: a string, a short text summary to the end of the post url :param title: a string, the title of the link :param url: a string, the url of the link you are posting :param description: a string, the description of the link you are posting :returns: a dict created from the JSON response
6.178225
12.972357
0.476261
kwargs.update({"type": "chat"}) return self._send_post(blogname, kwargs)
def create_chat(self, blogname, **kwargs)
Create a chat post on a blog :param blogname: a string, the url of the blog you want to post to. :param state: a string, The state of the post. :param tags: a list of tags that you want applied to the post :param tweet: a string, the customized tweet that you want :param date: a string, the GMT date and time of the post :param format: a string, sets the format type of the post. html or markdown :param slug: a string, a short text summary to the end of the post url :param title: a string, the title of the conversation :param conversation: a string, the conversation you are posting :returns: a dict created from the JSON response
5.587479
14.807127
0.377351
kwargs.update({"type": "audio"}) return self._send_post(blogname, kwargs)
def create_audio(self, blogname, **kwargs)
Create an audio post on a blog :param blogname: a string, the url of the blog you want to post to. :param state: a string, The state of the post. :param tags: a list of tags that you want applied to the post :param tweet: a string, the customized tweet that you want :param date: a string, the GMT date and time of the post :param format: a string, sets the format type of the post. html or markdown :param slug: a string, a short text summary to the end of the post url :param caption: a string, the caption for the post :param external_url: a string, the url of the audio you are uploading :param data: a string, the local filename path of the audio you are uploading :returns: a dict created from the JSON response
5.506751
13.124564
0.419576
kwargs.update({"type": "video"}) return self._send_post(blogname, kwargs)
def create_video(self, blogname, **kwargs)
Create a video post on a blog :param blogname: a string, the url of the blog you want to post to. :param state: a string, The state of the post. :param tags: a list of tags that you want applied to the post :param tweet: a string, the customized tweet that you want :param date: a string, the GMT date and time of the post :param format: a string, sets the format type of the post. html or markdown :param slug: a string, a short text summary to the end of the post url :param caption: a string, the caption for the post :param embed: a string, the embed code that you'd like to upload :param data: a string, the local filename path of the video you are uploading :returns: a dict created from the JSON response
5.816899
12.90056
0.450903
url = "/v2/blog/{}/post/reblog".format(blogname) valid_options = ['id', 'reblog_key', 'comment'] + self._post_valid_options(kwargs.get('type', None)) if 'tags' in kwargs and kwargs['tags']: # Take a list of tags and make them acceptable for upload kwargs['tags'] = ",".join(kwargs['tags']) return self.send_api_request('post', url, kwargs, valid_options)
def reblog(self, blogname, **kwargs)
Creates a reblog on the given blogname :param blogname: a string, the url of the blog you want to reblog to :param id: an int, the post id that you are reblogging :param reblog_key: a string, the reblog key of the post :param comment: a string, a comment added to the reblogged post :returns: a dict created from the JSON response
4.625059
5.156237
0.896983
url = "/v2/blog/{}/post/delete".format(blogname) return self.send_api_request('post', url, {'id': id}, ['id'])
def delete_post(self, blogname, id)
Deletes a post with the given id :param blogname: a string, the url of the blog you want to delete from :param id: an int, the post id that you want to delete :returns: a dict created from the JSON response
5.52014
7.196237
0.767087
url = "/v2/blog/{}/post".format(blogname) valid_options = self._post_valid_options(params.get('type', None)) if len(params.get("tags", [])) > 0: # Take a list of tags and make them acceptable for upload params['tags'] = ",".join(params['tags']) return self.send_api_request("post", url, params, valid_options)
def _send_post(self, blogname, params)
Formats parameters and sends the API request off. Validates common and per-post-type parameters and formats your tags for you. :param blogname: a string, the blogname of the blog you are posting to :param params: a dict, the key-value of the parameters for the api request :param valid_options: a list of valid options that the request allows :returns: a dict parsed from the JSON response
4.645332
4.510505
1.029892
if needs_api_key: params.update({'api_key': self.request.consumer_key}) valid_parameters.append('api_key') files = {} if 'data' in params: if isinstance(params['data'], list): for idx, data in enumerate(params['data']): files['data['+str(idx)+']'] = open(params['data'][idx], 'rb') else: files = {'data': open(params['data'], 'rb')} del params['data'] validate_params(valid_parameters, params) if method == "get": return self.request.get(url, params) else: return self.request.post(url, params, files)
def send_api_request(self, method, url, params={}, valid_parameters=[], needs_api_key=False)
Sends the url with parameters to the requested url, validating them to make sure that they are what we expect to have passed to us :param method: a string, the request method you want to make :param params: a dict, the parameters used for the API request :param valid_parameters: a list, the list of valid parameters :param needs_api_key: a boolean, whether or not your request needs an api key injected :returns: a dict parsed from the JSON response
2.37039
2.646863
0.895547
tree = compile(code, path, "exec", ast.PyCF_ONLY_AST) McCabeChecker.max_complexity = int(params.get('complexity', 10)) return [ {'lnum': lineno, 'offset': offset, 'text': text, 'type': McCabeChecker._code} for lineno, offset, text, _ in McCabeChecker(tree, path).run() ]
def run(path, code=None, params=None, **meta)
McCabe code checking. :return list: List of errors.
5.326314
4.966065
1.072542
seen = set() values = val if isinstance(val, (list, tuple)) else val.strip().split(',') return [x for x in values if x and not (x in seen or seen.add(x))]
def split_csp_str(val)
Split comma separated string into unique values, keeping their order. :returns: list of split values
3.507583
3.269259
1.072898
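A usage sketch of the ordered de-duplication above; note that `seen.add(x)` returns None, so the `or` clause records each new value while keeping it in the output:

def split_csp_str(val):
    seen = set()
    values = val if isinstance(val, (list, tuple)) else val.strip().split(',')
    return [x for x in values if x and not (x in seen or seen.add(x))]

print(split_csp_str("E501,W0401,E501,,C901"))       # ['E501', 'W0401', 'C901']
print(split_csp_str(["pep8", "pyflakes", "pep8"]))  # ['pep8', 'pyflakes']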
result = list() for name in split_csp_str(linters): linter = LINTERS.get(name) if linter: result.append((name, linter)) else: logging.warning("Linter `%s` not found.", name) return result
def parse_linters(linters)
Initialize chosen linters. :returns: list of initialized linters
4.389285
5.050542
0.869072
if rootdir is None: return DEFAULT_CONFIG_FILE for path in CONFIG_FILES: path = os.path.join(rootdir, path) if os.path.isfile(path) and os.access(path, os.R_OK): return path
def get_default_config_file(rootdir=None)
Search for configuration file.
2.171179
2.076571
1.04556
args = args or [] # Parse args from command string options = PARSER.parse_args(args) options.file_params = dict() options.linters_params = dict() # Compile options from ini if config: cfg = get_config(str(options.options), rootdir=rootdir) for opt, val in cfg.default.items(): LOGGER.info('Find option %s (%s)', opt, val) passed_value = getattr(options, opt, _Default()) if isinstance(passed_value, _Default): if opt == 'paths': val = val.split() if opt == 'skip': val = fix_pathname_sep(val) setattr(options, opt, _Default(val)) # Parse file related options for name, opts in cfg.sections.items(): if name == cfg.default_section: continue if name.startswith('pylama'): name = name[7:] if name in LINTERS: options.linters_params[name] = dict(opts) continue mask = re.compile(fnmatch.translate(fix_pathname_sep(name))) options.file_params[mask] = dict(opts) # Override options _override_options(options, **overrides) # Postprocess options for name in options.__dict__: value = getattr(options, name) if isinstance(value, _Default): setattr(options, name, process_value(name, value.value)) if options.concurrent and 'pylint' in options.linters: LOGGER.warning('Can\'t parse code asynchronously with pylint enabled.') options.concurrent = False return options
def parse_options(args=None, config=True, rootdir=CURDIR, **overrides)
Parse options from command line and configuration files. :return argparse.Namespace:
4.612477
4.678769
0.985831
for opt, val in overrides.items(): passed_value = getattr(options, opt, _Default()) if opt in ('ignore', 'select') and passed_value: value = process_value(opt, passed_value.value) value += process_value(opt, val) setattr(options, opt, value) elif isinstance(passed_value, _Default): setattr(options, opt, process_value(opt, val))
def _override_options(options, **overrides)
Override options.
4.252077
4.059135
1.047533
action = ACTIONS.get(name) if not action: return value if callable(action.type): return action.type(value) if action.const: return bool(int(value)) return value
def process_value(name, value)
Compile option value.
4.596166
4.244585
1.082831
config = Namespace() config.default_section = 'pylama' if not ini_path: path = get_default_config_file(rootdir) if path: config.read(path) else: config.read(ini_path) return config
def get_config(ini_path=None, rootdir=None)
Load configuration from INI. :return Namespace:
3.975779
4.235734
0.938628
LOGGER.setLevel(logging.INFO if options.verbose else logging.WARN) if options.report: LOGGER.removeHandler(STREAM) LOGGER.addHandler(logging.FileHandler(options.report, mode='w')) if options.options: LOGGER.info('Try to read configuration from: %r', options.options)
def setup_logger(options)
Set up the logger with the given options.
4.015098
4.126982
0.97289
import _ast builtins = params.get("builtins", "") if builtins: builtins = builtins.split(",") tree = compile(code, path, "exec", _ast.PyCF_ONLY_AST) w = checker.Checker(tree, path, builtins=builtins) w.messages = sorted(w.messages, key=lambda m: m.lineno) return [{ 'lnum': m.lineno, 'text': m.message % m.message_args, 'type': m.message[0] } for m in w.messages]
def run(path, code=None, params=None, **meta)
Check code with pyflakes. :return list: List of errors.
3.803263
3.452326
1.101652
if not candidates: candidates = [] for path_ in options.paths: path = op.abspath(path_) if op.isdir(path): for root, _, files in walk(path): candidates += [op.relpath(op.join(root, f), CURDIR) for f in files] else: candidates.append(path) if rootdir is None: rootdir = path if op.isdir(path) else op.dirname(path) paths = [] for path in candidates: if not options.force and not any(l.allow(path) for _, l in options.linters): continue if not op.exists(path): continue paths.append(path) if options.concurrent: return check_async(paths, options, rootdir) errors = [] for path in paths: errors += run(path=path, code=code, rootdir=rootdir, options=options) return errors
def check_path(options, rootdir=None, candidates=None, code=None)
Check path. :param rootdir: Root directory (for making relative file paths) :param options: Parsed pylama options (from pylama.config.parse_options) :returns: (list) Errors list
3.034732
3.059588
0.991876
if args is None: args = sys.argv[1:] options = parse_options(args) setup_logger(options) LOGGER.info(options) # Install VSC hook if options.hook: from .hook import install_hook for path in options.paths: return install_hook(path) return process_paths(options, error=error)
def shell(args=None, error=True)
Endpoint for the console. Parse command arguments and configuration files, then run the checkers. :return list: list of errors :raise SystemExit:
5.041686
5.746087
0.877412
errors = check_path(options, rootdir=CURDIR, candidates=candidates) if options.format in ['pycodestyle', 'pep8']: pattern = "%(filename)s:%(lnum)s:%(col)s: %(text)s" elif options.format == 'pylint': pattern = "%(filename)s:%(lnum)s: [%(type)s] %(text)s" else: # 'parsable' pattern = "%(filename)s:%(lnum)s:%(col)s: [%(type)s] %(text)s" for er in errors: if options.abspath: er._info['filename'] = op.abspath(er.filename) LOGGER.warning(pattern, er._info) if error: sys.exit(int(bool(errors))) return errors
def process_paths(options, candidates=None, error=True)
Process files and log errors.
3.326667
3.386791
0.982247
self.tokens = [] self.source = source self.pos = 0
def reset(self, source)
Reset scanner's state. :param source: Source for parsing
5.412066
6.016602
0.899522
self.pre_scan() token = None end = len(self.source) while self.pos < end: best_pat = None best_pat_len = 0 # Check patterns for p, regexp in self.patterns: m = regexp.match(self.source, self.pos) if m: best_pat = p best_pat_len = len(m.group(0)) break if best_pat is None: raise SyntaxError( "SyntaxError[@char {0}: {1}]".format( self.pos, "Bad token.")) # Ignore patterns if best_pat in self.ignore: self.pos += best_pat_len continue # Create token token = ( best_pat, self.source[self.pos:self.pos + best_pat_len], self.pos, self.pos + best_pat_len, ) self.pos = token[-1] self.tokens.append(token)
def scan(self)
Scan source and grab tokens.
3.099917
2.853777
1.086251
escape_re = re.compile(r'\\\n[\t ]+') self.source = escape_re.sub('', self.source)
def pre_scan(self)
Prepare string for scanning.
6.948061
5.812273
1.195412
for key in self: yield key, self.__getitem__(key, raw=raw)
def iteritems(self, raw=False)
Iterate self items.
4.777037
4.132632
1.155931
for f in files: try: with io.open(f, encoding='utf-8') as ff: NS_LOGGER.info('Read from `{0}`'.format(ff.name)) self.parse(ff.read(), **params) except (IOError, TypeError, SyntaxError, io.UnsupportedOperation): if not self.silent_read: NS_LOGGER.error('Reading error `{0}`'.format(ff.name)) raise
def read(self, *files, **params)
Read and parse INI files. :param *files: Files for reading :param **params: Params for parsing. Set `update=False` to prevent values from being redefined.
3.935149
4.010619
0.981183
if isinstance(f, str): f = io.open(f, 'w', encoding='utf-8') if not hasattr(f, 'read'): raise AttributeError("Wrong type of file: {0}".format(type(f))) NS_LOGGER.info('Write to `{0}`'.format(f.name)) for section in self.sections.keys(): f.write('[{0}]\n'.format(section)) for k, v in self[section].items(): f.write('{0:15}= {1}\n'.format(k, v)) f.write('\n') f.close()
def write(self, f)
Write namespace as INI file. :param f: File object or path to file.
2.922389
2.763555
1.057474
scanner = INIScanner(source) scanner.scan() section = self.default_section name = None for token in scanner.tokens: if token[0] == 'KEY_VALUE': name, value = re.split('[=:]', token[1], 1) name, value = name.strip(), value.strip() if not update and name in self[section]: continue self[section][name] = value elif token[0] == 'SECTION': section = token[1].strip('[]') elif token[0] == 'CONTINUATION': if not name: raise SyntaxError( "SyntaxError[@char {0}: {1}]".format( token[2], "Bad continuation.")) self[section][name] += '\n' + token[1].strip()
def parse(self, source, update=True, **params)
Parse INI source as string. :param source: Source of INI :param update: Replace already defined items
3.895767
3.751683
1.038405
errors = [] fileconfig = dict() linters = LINTERS linters_params = dict() lname = 'undefined' params = dict() path = op.relpath(path, rootdir) if options: linters = options.linters linters_params = options.linters_params for mask in options.file_params: if mask.match(path): fileconfig.update(options.file_params[mask]) if options.skip and any(p.match(path) for p in options.skip): LOGGER.info('Skip checking for path: %s', path) return [] try: with CodeContext(code, path) as ctx: code = ctx.code params = prepare_params(parse_modeline(code), fileconfig, options) LOGGER.debug('Checking params: %s', params) if params.get('skip'): return errors for item in params.get('linters') or linters: if not isinstance(item, tuple): item = item, LINTERS.get(item) lname, linter = item if not linter: continue lparams = linters_params.get(lname, dict()) LOGGER.info("Run %s %s", lname, lparams) ignore, select = merge_params(params, lparams) linter_errors = linter.run( path, code=code, ignore=ignore, select=select, params=lparams) if not linter_errors: continue errors += filter_errors([ Error(filename=path, linter=lname, **er) for er in linter_errors ], ignore=ignore, select=select) except IOError as e: LOGGER.error("IOError %s", e) errors.append(Error(text=str(e), filename=path, linter=lname)) except SyntaxError as e: LOGGER.error("SyntaxError %s", e) errors.append( Error(linter='pylama', lnum=e.lineno, col=e.offset, text='E0100 SyntaxError: {}'.format(e.args[0]), filename=path)) except Exception as e: # noqa import traceback LOGGER.error(traceback.format_exc()) errors = list(remove_duplicates(errors)) if code and errors: errors = filter_skiplines(code, errors) if options and options.sort: sort = dict((v, n) for n, v in enumerate(options.sort, 1)) def key(e): return (sort.get(e.type, 999), e.lnum) else: def key(e): return e.lnum return sorted(errors, key=key)
def run(path='', code=None, rootdir=CURDIR, options=None)
Run code checkers with given params. :param path: (str) A file's path. :param code: (str) A code source :return errors: list of dictionaries with error's information
3.295148
3.322627
0.99173
seek = MODELINE_RE.search(code) if seek: return dict(v.split('=') for v in seek.group(1).split(':')) return dict()
def parse_modeline(code)
Parse params from file's modeline. :return dict: Linter params.
5.275412
5.839087
0.903465
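An illustrative sketch with a hypothetical MODELINE_RE that matches the pylama-style modeline convention ("# pylama:key=value:key=value"); the exact regex in the project may differ:

import re

# Hypothetical modeline pattern, e.g. "# pylama:ignore=W0401:select=E501".
MODELINE_RE = re.compile(r'#\s*pylama:\s*((?:[\w_]*=[^:\n\s]+:?)+)', re.I | re.M)

def parse_modeline(code):
    seek = MODELINE_RE.search(code)
    if seek:
        return dict(v.split('=') for v in seek.group(1).split(':'))
    return dict()

print(parse_modeline("x = 1  # pylama:ignore=W0401:select=E501"))
# {'ignore': 'W0401', 'select': 'E501'}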
params = dict(skip=False, ignore=[], select=[], linters=[]) if options: params['ignore'] = list(options.ignore) params['select'] = list(options.select) for config in filter(None, [modeline, fileconfig]): for key in ('ignore', 'select', 'linters'): params[key] += process_value(key, config.get(key, [])) params['skip'] = bool(int(config.get('skip', False))) # TODO: skip what? This is causing erratic behavior for linters. params['skip'] = False params['ignore'] = set(params['ignore']) params['select'] = set(params['select']) return params
def prepare_params(modeline, fileconfig, options)
Prepare and merge params from modelines and configs. :return dict:
4.196558
4.686509
0.895455
select = select or [] ignore = ignore or [] for e in errors: for s in select: if e.number.startswith(s): yield e break else: for s in ignore: if e.number.startswith(s): break else: yield e
def filter_errors(errors, select=None, ignore=None, **params)
Filter errors by select and ignore options. :return generator: the errors that pass the filter
2.333244
2.924392
0.797856
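A small self-contained sketch of the prefix filtering above, operating on bare error codes instead of Error objects to keep it runnable (the function name here is illustrative):

def filter_codes(codes, select=(), ignore=()):
    # A selected prefix always wins; otherwise any ignored prefix drops the code.
    for code in codes:
        if any(code.startswith(s) for s in select):
            yield code
        elif not any(code.startswith(i) for i in ignore):
            yield code

print(list(filter_codes(["E501", "W0401", "C901"], select=["W"], ignore=["E5", "W"])))
# ['W0401', 'C901']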
if not errors: return errors enums = set(er.lnum for er in errors) removed = set([ num for num, l in enumerate(code.split('\n'), 1) if num in enums and SKIP_PATTERN(l) ]) if removed: errors = [er for er in errors if er.lnum not in removed] return errors
def filter_skiplines(code, errors)
Filter lines by `noqa`. :return list: The filtered errors
4.926225
5.154236
0.955762
ignore = params.get('ignore', set()) if 'ignore' in lparams: ignore = ignore | set(lparams['ignore']) select = params.get('select', set()) if 'select' in lparams: select = select | set(lparams['select']) return ignore, select
def merge_params(params, lparams)
Merge global ignore/select with linter local params.
2.507501
1.837631
1.364529
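A tiny usage sketch of the ignore/select merge above, combining global params with linter-local lparams:

def merge_params(params, lparams):
    # Union the global sets with any linter-local lists.
    ignore = set(params.get('ignore', set())) | set(lparams.get('ignore', []))
    select = set(params.get('select', set())) | set(lparams.get('select', []))
    return ignore, select

ignore, select = merge_params({'ignore': {'E501'}, 'select': set()},
                              {'ignore': ['W0401'], 'select': ['C901']})
print(sorted(ignore), sorted(select))  # ['E501', 'W0401'] ['C901']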
LOGGER.info('Async code checking is enabled.') path_queue = Queue.Queue() result_queue = Queue.Queue() for num in range(CPU_COUNT): worker = Worker(path_queue, result_queue) worker.setDaemon(True) LOGGER.info('Start worker #%s', (num + 1)) worker.start() for path in paths: path_queue.put((path, dict(options=options, rootdir=rootdir))) path_queue.join() errors = [] while True: try: errors += result_queue.get(False) except Queue.Empty: break return errors
def check_async(paths, options, rootdir=None)
Check given paths asynchronously. :return list: list of errors
2.833215
2.872684
0.986261
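For reference, a bare-bones version of the queue/worker pattern above using the standard library directly; the actual check is stubbed out with a placeholder string:

import queue
import threading

def worker(path_queue, result_queue):
    while True:
        path = path_queue.get()
        result_queue.put("checked %s" % path)  # stand-in for run(path, **params)
        path_queue.task_done()

path_q, result_q = queue.Queue(), queue.Queue()
for _ in range(2):
    threading.Thread(target=worker, args=(path_q, result_q), daemon=True).start()
for p in ["a.py", "b.py"]:
    path_q.put(p)
path_q.join()
print([result_q.get(False) for _ in range(2)])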
while True: path, params = self.path_queue.get() errors = run(path, **params) self.result_queue.put(errors) self.path_queue.task_done()
def run(self)
Run tasks from queue.
4.068095
3.514903
1.157385
try: result = line.split(':', maxsplit=4) filename, line_num_txt, column_txt, message_type, text = result except ValueError: return try: self.line_num = int(line_num_txt.strip()) self.column = int(column_txt.strip()) except ValueError: return self.filename = filename self.message_type = message_type.strip() self.text = text.strip() self.valid = True
def _parse(self, line)
Parse the output line
2.911394
2.83891
1.025533
text = [self.text] if self.note: text.append(self.note) return { 'lnum': self.line_num, 'col': self.column, 'text': ' - '.join(text), 'type': self.types.get(self.message_type, '') }
def to_result(self)
Convert to the Linter.run return value
4.019112
3.816452
1.053102
args = [path, '--follow-imports=skip', '--show-column-numbers'] stdout, stderr, status = api.run(args) messages = [] for line in stdout.split('\n'): line = line.strip() if not line: continue message = _MyPyMessage(line) if message.valid: if message.message_type == 'note': if messages and messages[-1].line_num == message.line_num: messages[-1].add_note(message.text) else: messages.append(message) return [m.to_result() for m in messages]
def run(path, code=None, params=None, **meta)
Check code with mypy. :return list: List of errors.
3.943657
3.732609
1.056542
if isinstance(value, (list, tuple, set)): return ",".join(value) if isinstance(value, bool): return "y" if value else "n" return str(value)
def prepare_value(value)
Prepare value for pylint.
2.891488
2.68757
1.075875
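A short usage sketch of the value normalisation above, showing how lists, booleans and numbers end up in pylint's rc-style string form:

def prepare_value(value):
    # Collections become comma-joined strings, booleans become "y"/"n",
    # everything else goes through str().
    if isinstance(value, (list, tuple, set)):
        return ",".join(value)
    if isinstance(value, bool):
        return "y" if value else "n"
    return str(value)

print(prepare_value(["C0111", "W0401"]))  # C0111,W0401
print(prepare_value(True))                # y
print(prepare_value(120))                 # 120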
parser = get_parser() for option in parser.option_list: if option.dest and option.dest in params: value = params[option.dest] if isinstance(value, str): params[option.dest] = option.convert_value(option, value) for key in ["filename", "exclude", "select", "ignore"]: if key in params and isinstance(params[key], str): params[key] = _parse_multi_options(params[key]) P8Style = StyleGuide(reporter=_PycodestyleReport, **params) buf = StringIO(code) return P8Style.input_file(path, lines=buf.readlines())
def run(path, code=None, params=None, **meta)
Check code with pycodestyle. :return list: List of errors.
4.827487
3.944107
1.223975
super(_PycodestyleReport, self).init_file( filename, lines, expected, line_offset) self.errors = []
def init_file(self, filename, lines, expected, line_offset)
Prepare storage for errors.
5.661604
4.660131
1.214902
code = super(_PycodestyleReport, self).error( line_number, offset, text, check) if code: self.errors.append(dict( text=text, type=code.replace('E', 'C'), col=offset + 1, lnum=line_number, ))
def error(self, line_number, offset, text, check)
Save errors.
4.636909
4.533515
1.022806
if 'ignore_decorators' in params: ignore_decorators = params['ignore_decorators'] else: ignore_decorators = None check_source_args = (code, path, ignore_decorators) if THIRD_ARG else (code, path) return [{ 'lnum': e.line, # Remove colon after error code ("D403: ..." => "D403 ..."). 'text': (e.message[0:4] + e.message[5:] if e.message[4] == ':' else e.message), 'type': 'D', 'number': e.code } for e in PyDocChecker().check_source(*check_source_args)]
def run(path, code=None, params=None, **meta)
pydocstyle code checking. :return list: List of errors.
6.465392
5.79838
1.115034