Dataset schema (column, dtype, observed values):

    column               dtype            observed values
    repo_name            stringlengths    5 to 114
    repo_url             stringlengths    24 to 133
    snapshot_id          stringlengths    40 to 40
    revision_id          stringlengths    40 to 40
    directory_id         stringlengths    40 to 40
    branch_name          stringclasses    209 values
    visit_date           timestamp[ns]
    revision_date        timestamp[ns]
    committer_date       timestamp[ns]
    github_id            int64            9.83k to 683M
    star_events_count    int64            0 to 22.6k
    fork_events_count    int64            0 to 4.15k
    gha_license_id       stringclasses    17 values
    gha_created_at       timestamp[ns]
    gha_updated_at       timestamp[ns]
    gha_pushed_at        timestamp[ns]
    gha_language         stringclasses    115 values
    files                listlengths      1 to 13.2k
    num_files            int64            1 to 13.2k
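The schema above can be consumed directly from the raw records. As a minimal sketch (assuming the rows are available locally as JSON Lines, one repository per line; the file name repos.jsonl is a placeholder, not part of the dataset), iterating over repositories and filtering on the per-repository metadata looks like this:

    import json

    # Each line is one repository record following the schema above:
    # repo-level metadata plus a "files" list of per-file records.
    with open("repos.jsonl") as f:
        for line in f:
            row = json.loads(line)
            python_files = [fi for fi in row["files"] if fi["language"] == "Python"]
            if row["star_events_count"] > 0 and python_files:
                print(row["repo_name"], row["num_files"], len(python_files))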
Sample row 1:

    repo_name:          AgalmicVentures/JTL
    repo_url:           https://github.com/AgalmicVentures/JTL
    snapshot_id:        a8d4910285066a913dd000706226c3771721c6b8
    revision_id:        2163dc1a974144b20d3d3e849c49fcfcffa9c0ef
    directory_id:       4708d833a715456b783391db0965a96fbbb5edb9
    branch_name:        refs/heads/master
    visit_date:         2023-01-10T14:17:54.744249
    revision_date:      2023-01-08T21:53:01
    committer_date:     2023-01-08T21:53:01
    github_id:          52,494,450
    star_events_count:  4
    fork_events_count:  2
    gha_license_id:     null
    gha_created_at:     null
    gha_updated_at:     null
    gha_pushed_at:      null
    gha_language:       null
    files:
[ { "alpha_fraction": 0.6522870659828186, "alphanum_fraction": 0.6625139713287354, "avg_line_length": 25.10194206237793, "blob_id": "57f4eb46601e698d44c7d32da93e2ef977257daa", "content_id": "b255f92f27344a631629ec248717a64f77b42304", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5378, "license_type": "permissive", "max_line_length": 128, "num_lines": 206, "path": "/JTL/Functions.py", "repo_name": "AgalmicVentures/JTL", "src_encoding": "UTF-8", "text": "\n# Copyright (c) 2015-2023 Agalmic Ventures LLC (www.agalmicventures.com)\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nimport binascii\nimport hashlib\nimport hmac\nimport math\n\n########## Basic Functions ##########\n\ndef toBool(data):\n\treturn data == 'True' or data == 'true'\n\ndef toFloat(data):\n\ttry:\n\t\treturn float(data)\n\texcept ValueError:\n\t\treturn None\n\texcept TypeError:\n\t\treturn None\n\ndef toInt(data):\n\ttry:\n\t\treturn int(data)\n\texcept ValueError:\n\t\treturn None\n\texcept TypeError:\n\t\treturn None\n\ndef toNumber(data):\n\t#TODO: figure out how to handle this cleanly (don't round floats)\n\tif type(data) is float:\n\t\treturn data\n\n\tintValue = toInt(data)\n\tif intValue is not None:\n\t\treturn intValue\n\n\treturn toFloat(data)\n\nfunctions = {\n\t#Any\n\t'toString': str,\n\t'toBool': toBool,\n\t'toFloat': toFloat,\n\t'toInt': toInt,\n\t'toNumber': toNumber,\n\n\t#None\n\t'isNull': lambda x: x is None,\n\n\t'default': lambda x, y: x if x is not None else y,\n\t'defaultNan': lambda x: x if x is not None else float('nan'),\n\n\t#Sequence\n\t'first': lambda s: s[0] if s is not None and len(s) > 1 else None,\n\t'rest': lambda s: s[1:] if s is not None and len(s) > 1 else None,\n\t'last': lambda s: s[-1] if s is not None and len(s) > 1 else None,\n\t'init': lambda s: s[:-1] if s is not None and len(s) > 1 else None,\n\n\t#String\n\t'join': lambda s, *args: (args[0] if len(args) > 0 else '').join(s) if s is not None else None,\n}\n\n########## Maybe Functions ##########\n\n#Functions in here handle null like the Option type\n\ndef maybe(f):\n\treturn lambda *args: f(*args) if None not in args else None\n\n#Functions that will be wrapped in maybe()\nmaybeFunctions = {\n\t#Bool\n\t'not': lambda x: not x,\n\n\t#Dict\n\t'keys': lambda d: list(d.keys()),\n\t'values': lambda d: list(d.values()),\n\n\t#Numer\n\t'+': lambda x, y: x + y,\n\t'-': lambda x, y: x - y,\n\t'*': lambda x, y: x * y,\n\t'/': lambda x, y: x / y,\n\t'**': 
lambda x, y: x ** y,\n\t'%': lambda x, y: x % y,\n\n\t'==': lambda x, y: x == y,\n\t'!=': lambda x, y: x != y,\n\t'<': lambda x, y: x < y,\n\t'<=': lambda x, y: x <= y,\n\t'>': lambda x, y: x > y,\n\t'>=': lambda x, y: x >= y,\n\n\t'isFinite': math.isfinite,\n\t'isNan': math.isnan,\n\n\t'abs': abs,\n\t'ceil': math.ceil,\n\t'cos': math.cos,\n\t'cosh': math.cosh,\n\t'erf': math.erf,\n\t'exp': math.exp,\n\t'floor': math.floor,\n\t'lg': math.log2,\n\t'ln': math.log,\n\t'log': math.log10,\n\t'sin': math.sin,\n\t'sinh': math.sinh,\n\t'sqrt': math.sqrt,\n\t'tan': math.tan,\n\t'tanh': math.tanh,\n\n\t#Sequence\n\t'count': lambda s, f: s.count(f),\n\t'length': len,\n\t'max': max,\n\t'min': min,\n\t'sorted': lambda s: sorted(s),\n\t'sum': sum,\n\t'unique': lambda s: list(set(s)),\n\n\t#String\n\t'lower': lambda s: s.lower(),\n\t'upper': lambda s: s.upper(),\n\t'capitalize': lambda s: s.capitalize(),\n\t'swapCase': lambda s: s.swapcase(),\n\n\t'strip': lambda s: s.strip(),\n\t'lstrip': lambda s: s.lstrip(),\n\t'rstrip': lambda s: s.rstrip(),\n\n\t'find': lambda s, f: s.find(f),\n\t'replace': lambda s, f, g: s.replace(f, g),\n\t'startsWith': lambda s, f: s.startswith(f),\n\t'endsWith': lambda s, f: s.endswith(f),\n\n\t'split': lambda s, sp: s.split(sp),\n\t'lines': lambda s: s.split('\\n'),\n\t'unlines': lambda s: '\\n'.join(s),\n\t'words': lambda s: s.split(' '),\n\t'unwords': lambda s: ' '.join(s),\n}\n\nfor name in maybeFunctions:\n\tfunction = maybeFunctions[name]\n\tfunctions[name] = maybe(function)\n\n########## Hash Functions ##########\n\ndef hashFunction(hashConstructor):\n\t\"\"\"\n\tAccepts the constructor of a hash algorithm and returns a function from a string to a hexified string digest.\n\n\t:param hashConstructor: hashing algorithm (e.g. hashlib.md5)\n\t:return: f(str)\n\t\"\"\"\n\tdef f(s):\n\t\th = hashConstructor()\n\t\th.update(s.encode('utf8', 'ignore'))\n\t\treturn binascii.hexlify(h.digest()).decode('utf8')\n\treturn f\n\ndef hmacFunction(hashConstructor):\n\t\"\"\"\n\tAccepts the constructor of a hash algorithm and returns an HMAC function.\n\n\t:param hashConstructor: hashing algorithm (e.g. hashlib.md5)\n\t:return: hmac(str, key)\n\t\"\"\"\n\tdef h(message, key):\n\t\treturn hmac.new(key=key.encode('utf8', 'ignore'), msg=message.encode('utf8', 'ignore'), digestmod=hashConstructor).hexdigest()\n\treturn h\n\nhashFunctions = {\n\t'md5': hashlib.md5,\n\t'sha1': hashlib.sha1,\n\t'sha224': hashlib.sha224,\n\t'sha256': hashlib.sha256,\n\t'sha384': hashlib.sha384,\n\t'sha512': hashlib.sha512,\n}\n\nfor name in hashFunctions:\n\tfunction = hashFunctions[name]\n\tfunctions[name] = hashFunction(function)\n\n\tfunctions['hmac_%s' % name] = hmacFunction(function)\n" }, { "alpha_fraction": 0.6392078995704651, "alphanum_fraction": 0.6500990390777588, "avg_line_length": 29.239521026611328, "blob_id": "54df119b9dc1480a6dc51d34450ea91fa9cac839", "content_id": "f882fc301b3bbc0bd18200a36e5c382e32f49d85", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 5050, "license_type": "permissive", "max_line_length": 264, "num_lines": 167, "path": "/README.md", "repo_name": "AgalmicVentures/JTL", "src_encoding": "UTF-8", "text": "# JTL\nJSON Transformation Language, JTL, is like `sed` and `awk` for JSON: a simple language for\ntransforming JSON values into other JSON values. The syntax of the language itself is also JSON\n(so it can operate on itself - meta!). 
Command line prototyping is easy:\n\n > cat tests/faa1.json\n {\n ...\n \"weather\": {\n ...,\n \"temp\": \"66.0 F (18.9 C)\",\n ...\n }\n }\n\n > cat tests/faa1.json | ./JTL/__init__.py '{\"tempF\": \"weather.temp\"}'\n {\n \"tempF\": \"66.0 F (18.9 C)\"\n }\n\n > cat tests/faa1.json | ./JTL/__init__.py '{\"tempF\": \"weather.temp $ words\"}'\n {\n \"tempF\": [\n \"66.0\",\n \"F\",\n \"(18.9\",\n \"C)\"\n ]\n }\n\n > cat tests/faa1.json | ./JTL/__init__.py '{\"tempF\": \"weather.temp $ words $ first\"}'\n {\n \"tempF\": \"66.0\"\n }\n\n > cat tests/faa1.json | ./JTL/__init__.py '{\"tempF\": \"weather.temp $ words $ first $ toFloat\"}'\n {\n \"tempF\": 66.0\n }\n\n## Motivation\nAlthough JSON has replaced XML as the de facto data format for structured text data, no standard suite of\nsupporting technologies has emerged. JTL is to JSON what XSL is to XML -- a transformation language written\nin the underlying format. It allows the quick creation of format converters, adapters for 3rd party API's,\ntransform scripts for ETL's, and more.\n\nJTL is designed to be simple to parse for both humans and computers. This makes the implementation simple,\nand allows the creation of value-added features like query optimizers.\n\nBecause it's input and output are JSON, it's highly composable. In fact, sometimes composition is the only\nway to do things. Since the code is also JSON, it can even be used self-referentially, for example to\nautomate refactoring.\n\n## Syntax\nThe basic syntax of a JTL transformation is a JSON dictionary with the same structure as the output, where all values are strings are JTL expressions.\n\nJTL expressions are of the form `<SELECTOR> [$ <FUNCTION> <ARG1>*]*`.\n\nSelectors are `.` separated paths: for example, `a.b.c` would return `3` from `{\"a\": {\"b\": {\"c\": 3}}}`.\n\nFunctions (and operators) transform data extracted by selectors.\n\n## Operators\nJTL supports the following operators in [Polish notation](https://en.wikipedia.org/wiki/Polish_notation) with the same semantics as Python:\n\n* Arithmetic: `+`, `-`, `*`, `/`, `**`, `%`\n* Comparison: `==`, `!=`, `<`, `<=`, `>`, `>=`\n\nFor example:\n\n > cat tests/faa1.json | ./JTL/__init__.py '{\"x\": \"weather.temp $ words $ first $ toFloat $ + 3.0 $ / 23\"}'\n {\n \"x\": 3.0\n }\n\n## Functions\nJTL has a wide variety of built in transformations. In order to easily handle missing values, all functions will pass through null unless otherwise indicated (much like an option monad).\n\n### Basic\n\n#### `default <VALUE>`\nReturns the input value or the first argument if the input is `null` (this is the one case with special `null` handling).\n\n#### `defaultNan`\nReturns the input value or `NaN` if the input is `null`.\n\n#### `isNull`\nReturns true if the value is `null`.\n\n#### `toBool`\nConverts the input value to a boolean.\n\n#### `toFloat`\nConverts the input value to a float, returning `null` if it is not a valid number.\n\n#### `toInt`\nConverts the input value to an integer, returning `null` if it is not a valid integer.\n\n#### `toString`\nConverts the input value to a string.\n\n### Bool\n\n#### `not`\nInverts the boolean value.\n\n### Dictionary\n\n#### `keys`\nReturns the keys of the dictionary as a list.\n\n#### `values`\nReturns the values of the dictionary as a list.\n\n### Hashing\nJTL supports a variety of cryptographic hash functions: `md5`, `sha1`, `sha224`, `sha256`, `sha384`, `sha512`. 
In addition, [HMAC's](https://en.wikipedia.org/wiki/Hash-based_message_authentication_code) are supported for each of these hash types (e.g. `hmac_md5`).\n\n### Math\n\n* Basics: `abs`, `ceil`, `floor`\n* Exponentials: `exp`, `lg`, `ln`, `log`, `sqrt`\n* Flags: `isFinite`, `isNan`\n* Trigonometry: `sin`, `cos`, `tan`\n* Hyperbolic trigonometry: `sinh`, `cosh`, `tanh`\n* Advanced: `erf`\n\n### Sequence\n\n#### `count <ELEMENT>`\nReturns the number of times the element appears in the list.\n\n#### `first`\nReturns the first element of the list, or `null` if the list is empty.\n\n#### `init`\nReturns all of the elements of the list except the last one.\n\n#### `last`\nReturns the last element of the list, or `null` if the list is empty.\n\n#### `rest`\nReturns the rest of the list after the first element.\n\n#### `length`\nReturns the length of the list.\n\n#### `max`\nFinds the maximum value in the list.\n\n#### `min`\nFinds the minimum value in the list.\n\n#### `sorted`\nReturns a sorted version of the list.\n\n#### `sum`\nTakes the sum of values in the list.\n\n#### `unique`\nReturns a copy of the list with duplicates removed.\n\n### String\n\n* Case transformation: `capitalize`, `lower`, `swapCase`, `upper`\n* Search: `find`, `replace`, `startsWith`, `endsWith`\n* Split / join: `join`, `split`, `lines`, `unlines`, `words`, `unwords`\n* Whitespace: `lstrip`, `rstrip`, `strip`\n" }, { "alpha_fraction": 0.7467771768569946, "alphanum_fraction": 0.7504603862762451, "avg_line_length": 31.402984619140625, "blob_id": "ce16537e06e821ed2de52884fe35db10c295ae8c", "content_id": "8344707f85aa1485c2add5bdde80597f37789cbc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2172, "license_type": "permissive", "max_line_length": 79, "num_lines": 67, "path": "/JTL/Parser.py", "repo_name": "AgalmicVentures/JTL", "src_encoding": "UTF-8", "text": "\n# Copyright (c) 2015-2023 Agalmic Ventures LLC (www.agalmicventures.com)\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nimport json\nimport shlex\n\nfrom JTL import Utility\n\ndef parseTransform(transform):\n\t\"\"\"\n\tParses a single JTL transform into tokens.\n\n\t:param transform: str\n\t:return: [[str]]\n\t\"\"\"\n\t#Create a lexer with some slight tweaks\n\tlexer = shlex.shlex(transform, posix=False)\n\tlexer.wordchars += '.+-*=<>!'\n\n\t#Split into operations\n\toperations = []\n\toperation = []\n\tfor token in lexer:\n\t\t#Split tokens on $\n\t\tif token == '$':\n\t\t\toperations.append(operation)\n\t\t\toperation = []\n\t\telse:\n\t\t\toperation.append(token)\n\n\t#Append any final operation\n\toperations.append(operation)\n\n\treturn operations\n\ndef parseArgument(argument, data):\n\t\"\"\"\n\tParses an argument to an operation.\n\n\t:param argument: str from tokenization\n\t:param data: dict of original data to extract more fields from\n\t:return: a valid JSON value\n\t\"\"\"\n\ttry:\n\t\t#Try loading as a constrant first\n\t\t#TODO: strings are awkward and require escaping, so figure that out\n\t\treturn json.loads(argument)\n\texcept ValueError:\n\t\t#If that fails, it might be a name\n\t\treturn Utility.extractPath(data, argument)\n" }, { "alpha_fraction": 0.7253521084785461, "alphanum_fraction": 0.7309859395027161, "avg_line_length": 35.21428680419922, "blob_id": "e885501daa0e96b43d1b5c1e8a21c3c8e3934894", "content_id": "75d36cf8177330ba35e5b599aa984a1ae812cbb5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3550, "license_type": "permissive", "max_line_length": 122, "num_lines": 98, "path": "/JTL/Interpreter.py", "repo_name": "AgalmicVentures/JTL", "src_encoding": "UTF-8", "text": "\n# Copyright (c) 2015-2023 Agalmic Ventures LLC (www.agalmicventures.com)\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nfrom JTL import Functions\nfrom JTL import Parser\nfrom JTL import Utility\n\ndef applyOperation(value, operation, args, location):\n\t\"\"\"\n\tApplies an operation to a value with some extra arguments.\n\n\t:param value: a valid JSON value\n\t:param operation: str name of the operation to apply (from the tokenizer)\n\t:param args: [str] argument tokens\n\t:return: a valid JSON value\n\t\"\"\"\n\tfunction = Functions.functions.get(operation)\n\tif function is None:\n\t\t#Is it a simple integer index?\n\t\tindex = Functions.toInt(operation)\n\t\tif index is not None:\n\t\t\treturn value[index]\n\n\t\t#Or perhaps it's a selector function? .abc.def\n\t\tif operation[0] == '.':\n\t\t\tif len(args) == 0:\n\t\t\t\treturn Utility.extractPath(value, operation[1:])\n\t\t\telse:\n\t\t\t\traise SyntaxError('selector %s has arguments in \"%s\" (did you mean to do an operation?)' % (operation[0], location))\n\n\t\t#Nothing found -- error!\n\t\traise NameError('cannot find operation %s in \"%s\"' % (operation, location))\n\n\treturn function(value, *args)\n\ndef transform(data, transform, location=''):\n\t\"\"\"\n\tComputes one single transformation on some input data.\n\n\t:param data: dict\n\t:param key: str output key (used for error reporting)\n\t:param transform: str JTL expression\n\t:return: a valid JSON value\n\t\"\"\"\n\t#Parse the transformation into tokens\n\ttokens = Parser.parseTransform(transform)\n\tif len(tokens) == 0:\n\t\treturn None\n\n\tprimarySelector = tokens[0][0]\n\tvalue = Utility.extractPath(data, primarySelector)\n\tfor n, section in enumerate(tokens[1:]):\n\t\tif len(section) == 0:\n\t\t\t#n is the previous token\n\t\t\traise SyntaxError('missing final operation after %s in \"%s\"' % (tokens[n][0], location))\n\n\t\toperation = section[0]\n\t\targs = [Parser.parseArgument(argument, data) for argument in section[1:]]\n\t\tvalue = applyOperation(value, operation, args, location)\n\n\treturn value\n\ndef transformJson(data, transformData, location=''):\n\t\"\"\"\n\tTransforms some input data based on a transformation (transformData).\n\n\t:param data: dict\n\t:param transformData: dict | list | str\n\t:return: dict\n\t\"\"\"\n\tif type(transformData) is dict:\n\t\tresult = {}\n\t\tfor k, v in transformData.items():\n\t\t\tresult[k] = transformJson(data, v, '%s.%s' % (location, k))\n\telif type(transformData) is list:\n\t\tresult = [transformJson(data, v, '%s.%s' % (location, n)) for n, v in enumerate(transformData)]\n\telif type(transformData) is str:\n\t\tresult = transform(data, transformData, location)\n\telse:\n\t\tresult = None\n\treturn result\n" }, { "alpha_fraction": 0.7526754140853882, "alphanum_fraction": 0.759215235710144, "avg_line_length": 40, "blob_id": "0af9126ebd4e45ae0996e99bc533826a2bfb25a4", "content_id": "fd1d12a69b72d6ec644322a6628dfc4aa904b697", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1682, "license_type": "permissive", "max_line_length": 106, "num_lines": 41, "path": "/JTL/Utility.py", "repo_name": "AgalmicVentures/JTL", "src_encoding": "UTF-8", "text": "\n# Copyright (c) 2015-2023 Agalmic Ventures LLC (www.agalmicventures.com)\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of 
this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\ndef extractPath(data, path):\n\t\"\"\"\n\tIndexes a JSON object with a period separated path.\n\n\t:param data: dict\n\t:param path: str\n\t:return: a valid JSON value\n\t\"\"\"\n\tsplitPath = path.split('.')\n\treturn extractSplitPath(data, splitPath)\n\ndef extractSplitPath(data, splitPath):\n\t\"\"\"\n\tIndexes a JSON object with list of string keys as a path.\n\n\t:param data: dict\n\t:param path: [str]\n\t:return: a valid JSON value\n\t\"\"\"\n\tnextData = data.get(splitPath[0])\n\treturn nextData if len(splitPath) <= 1 or nextData is None else extractSplitPath(nextData, splitPath[1:])\n" }, { "alpha_fraction": 0.7462509870529175, "alphanum_fraction": 0.7513812184333801, "avg_line_length": 34.69013977050781, "blob_id": "fd42ba7d23d8cc7631d23f10a81b4de260b00fd6", "content_id": "d7da7cab3b5dc49fabf846cbc9f09b110fce18c3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2534, "license_type": "permissive", "max_line_length": 118, "num_lines": 71, "path": "/JTL/__init__.py", "repo_name": "AgalmicVentures/JTL", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\n# Copyright (c) 2015-2023 Agalmic Ventures LLC (www.agalmicventures.com)\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nimport argparse\nimport json\nimport sys\n\ndef main():\n\t\"\"\"\n\tRuns the main JTL program.\n\n\t:return: int\n\t\"\"\"\n\n\t#Parse arguments\n\tparser = argparse.ArgumentParser(description='JSON Transformation Language')\n\tparser.add_argument('-i', '--indent', default=4, type=int, help='Indentation amount.')\n\tparser.add_argument('-t', '--transform-file', help='The name of the JSON file containing the transformation to run.')\n\tparser.add_argument('transform', nargs='?', help='The transformation to run.')\n\targuments = parser.parse_args(sys.argv[1:])\n\n\t#Load the transformation\n\tif arguments.transform is None and arguments.transform_file is not None:\n\t\t#From a file\n\t\twith open(arguments.transform_file, 'r') as f:\n\t\t\ttransformStr = f.read()\n\telif arguments.transform is not None and arguments.transform_file is None:\n\t\t#From the command line\n\t\ttransformStr = arguments.transform\n\telse:\n\t\tprint('ERROR: Specify either a transform file or a transform')\n\t\treturn 1\n\n\ttransformData = json.loads(transformStr)\n\n\t#Read the JSON in from stdin\n\t#TODO: error handling\n\tdata = json.loads(sys.stdin.read())\n\n\t#Transform the JSON\n\t#TODO: cleaner way to do this\n\tsys.path.append('.')\n\timport Interpreter\n\tresult = Interpreter.transformJson(data, transformData)\n\n\t#Output the result\n\tprint(json.dumps(result, indent=arguments.indent, sort_keys=True))\n\n\treturn 0\n\nif __name__ == '__main__':\n\tsys.exit(main())\n" } ]
    num_files:          6
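The first sample row embeds the JTL source tree, and its README documents the `<SELECTOR> [$ <FUNCTION> <ARG1>*]*` expression syntax through command-line examples. As a minimal sketch (assuming the JTL package from that repository is importable, e.g. with the repo root on PYTHONPATH), the same transformation can be driven from Python via `Interpreter.transformJson`:

    from JTL import Interpreter

    # Mirrors the README example: select weather.temp, split it into words,
    # take the first token, and convert it to a float.
    data = {"weather": {"temp": "66.0 F (18.9 C)"}}
    spec = {"tempF": "weather.temp $ words $ first $ toFloat"}

    print(Interpreter.transformJson(data, spec))  # {'tempF': 66.0}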
Sample row 2:

    repo_name:          Kyekifino/PLM18-Group-E
    repo_url:           https://github.com/Kyekifino/PLM18-Group-E
    snapshot_id:        c17f2e2bcea49b70d31815bc5f55e377cd87720e
    revision_id:        7af121dd790550b6831921877e1850f38423a0ba
    directory_id:       34a8d987e61b64344b064296b65496f415e5fd8b
    branch_name:        refs/heads/master
    visit_date:         2021-05-12T04:21:17.757506
    revision_date:      2018-05-10T06:35:14
    committer_date:     2018-05-10T06:35:14
    github_id:          117,159,098
    star_events_count:  0
    fork_events_count:  0
    gha_license_id:     null
    gha_created_at:     null
    gha_updated_at:     null
    gha_pushed_at:      null
    gha_language:       null
    files:
[ { "alpha_fraction": 0.5712290406227112, "alphanum_fraction": 0.5812051296234131, "avg_line_length": 36.12592697143555, "blob_id": "a81d8874daa0f64d7f423a873e91bfa8e9a2d828", "content_id": "258fe825e2960f93802b74cc6d41331e06f2bf11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5012, "license_type": "no_license", "max_line_length": 129, "num_lines": 135, "path": "/proj3/Stress.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "from Game import Game\nfrom Cards import Deck, Card, NotInStackException\nfrom testFramework import testFramework\n\n# Implementation of Bartok as a Game\n\nclass Stress(Game):\n\n #---------------------------------------------------\n # Initializing attributes, and adding action methods\n #---------------------------------------------------\n def __init__(self, players):\n super(Stress, self).__init__(players)\n self.playFlag = False\n self.currentPlayer = None\n self.skipNextTurn = False\n # Discard pile used to pick up with cheat moves\n self.played = Deck()\n # Dictionary to define possible actions to take\n self.actions = { \"{players}\" : self.getPlayers,\n \"{start}\" : self.playStress,\n \"{play}\" : self.playCards,\n \"{hand}\" : self.showHand,\n \"{help}\" : self.getHelp }\n\n #---------------------------------------------------\n # Defining game actions\n #---------------------------------------------------\n def getHelp(self, player, msglist):\n player.tell(\"To play cards on your turn, write {play} followed by the card you want to drop and the card you want to take. \")\n player.tell(\"For example, write \\\"{play} H4 S4\\\" to drop the 4 of Hearts and pick up the 4 of Spades. \")\n player.tell(\"In order to play a card, the card must you're picking up must be on the table. \")\n player.tell(\"The goal of the game is to have \" + str(48 / len(self.players)) + \" piles of four of a kind. \")\n player.tell(\"To see your hand, write {hand}. 
For help, write {help}.\")\n\n def showHand(self, player, msglist):\n player.tell(\"The cards in your hand:\")\n player.tell(player.getHand())\n\n def playStress(self, player, msglist):\n size = len(self.players)\n if size != 2 and size != 3 and size != 4 and size != 6 and size != 12:\n player.tell(\"The number of players must divide twelve (2, 3, 4, 6, or 12 people\")\n else:\n self.playing = True\n\n def getPlayers(self, player, msglist):\n player.tell(\"Current players: \")\n msg = \"\"\n for p in self.players:\n msg += (p.name + \"\\n\")\n player.tell(msg[:-1])\n\n def playCards(self, player, msglist):\n cards = msglist[1:]\n playedCards = []\n for card in cards:\n card = Card(str(card[0]),str(card[1:]))\n playedCards.append(card)\n if not self.playing:\n player.tell(\"Wait for the game to start...\")\n elif player != self.currentPlayer:\n player.tell(\"Wait for your turn...\")\n elif (len(msglist) != 3):\n player.tell(\"You must drop one card and pick up one card.\")\n elif (playedCards[1] not in self.played.cards):\n player.tell(\"You must pick up one of the played cards.\")\n elif (playedCards[0] not in player.hand.cards):\n player.tell(\"You must own the card you want to drop.\")\n else:\n self.played.addToDeck(playedCards[0])\n player.hand.cards.remove(playedCards[0])\n player.addToHand(self.played.remove(playedCards[1]))\n self.broadcast(str(self.currentPlayer) + \" dropped \" + str(playedCards[0]) + \" and picked up \" + str(playedCards[1]))\n self.showGUIHand(self.currentPlayer)\n self.playFlag = True\n\n #---------------------------------------------------\n # Defining game rules\n #---------------------------------------------------\n\n def pregameActions(self):\n # Set to players\n self.nextPlayer = iter(self.nextPlayerIterFunc())\n\n # Make game announcements\n self.broadcast(\"The Stress Game is starting!\")\n self.broadcast(\"There are %d players playing!\" % len(self.players))\n self.wait(1)\n for p in self.players:\n self.getHelp(p, None)\n self.wait(2)\n self.deck.shuffle()\n self.deck.dealCards(self.players, 48 / len(self.players))\n self.played.addToDeck(self.deck.draw())\n self.played.addToDeck(self.deck.draw())\n self.played.addToDeck(self.deck.draw())\n self.played.addToDeck(self.deck.draw())\n for p in self.players:\n p.tell(\"//{hand}//\" + p.getHand())\n return True\n\n def preplayGuards(self):\n self.broadcast(\"It is %s\\'s turn!\\n\" % self.currentPlayer.name)\n self.wait(.25)\n self.broadcast(\"Current cards are \" + str(self.played))\n self.wait(1)\n self.showHand(self.currentPlayer, None)\n return True\n\n def doPlay(self):\n while not self.playFlag:\n pass\n self.playFlag = False\n return True\n\n def checkForVictory(self):\n types = []\n suits = {\n \"H\" : 0,\n \"S\" : 0,\n \"D\" : 0,\n \"C\" : 0\n }\n for card in self.currentPlayer.hand.cards:\n if card.rank not in types:\n types.append(card.rank)\n suits[card.suit] = 1\n return suits[\"H\"] == 1 and suits[\"D\"] == 1 and suits[\"S\"] == 1 and suits[\"C\"] and types == 48 / len(self.players)\n \n\n def endGame(self):\n self.wait(1)\n self.broadcast(str(self.currentPlayer) + \" has \" + str(48 / len(self.players)) + \" piles of four of a kind and wins!\")\n self.broadcast(\"Thanks for playing!\")\n" }, { "alpha_fraction": 0.5100228190422058, "alphanum_fraction": 0.5123065114021301, "avg_line_length": 23.33333396911621, "blob_id": "caa19eeb7e333f35095ed155d92cca7da7560164", "content_id": "51f71238c1c38a4663ce3180d0a23bd44faa6c8a", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 3941, "license_type": "no_license", "max_line_length": 80, "num_lines": 162, "path": "/proj2/stateMachineFramework.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "# vim: set filetype=python ts=2 sw=2 sts=2 expandtab:\nimport re, random\n\n\ndef rseed(seed=1):\n random.seed(int(seed))\n\n\ndef shuffle(lst):\n random.shuffle(lst)\n return lst\n\n\ndef about(f):\n print(\"\\n-----| %s |-----------------\" % f.__name__)\n if f.__doc__:\n print(\"# \" + re.sub(r'\\n[ \\t]*', \"\\n# \", f.__doc__))\n\ndef contains(all, some):\n return all.find(some) != -1\n\ndef isa(k, seen=None):\n assert isinstance(k, type), \"superclass must be 'object'\"\n seen = seen or set()\n if k not in seen:\n seen.add(k)\n yield k\n for sub in k.__subclasses__():\n for x in isa(sub, seen):\n yield x\n\n\nclass Thing(object):\n def __repr__(self):\n return self.__class__.__name__ + kv(self.__dict__)\n\n\nclass o(Thing):\n def __init__(self, **dic): self.__dict__.update(dic)\n\n def __getitem__(self, x): return self.__dict__[x]\n\nclass BigPayloadException(Exception):\n def __init__(self, message=\"Payload too large...\"):\n self.message = message\n\n# ---------------------------------------\ndef asLambda(self, txt):\n def methodsOf(self):\n return [s for s in self.__dir__() if s[0] is not \"_\"]\n\n for one in methodsOf(self):\n txt = re.sub(one, 'z.%s()' % one, txt)\n txt = \"lambda z: \" + txt\n # e.g. print(\"> \",code(self))\n\n\n# ---------------------------------------\n# <BEGIN>\n# Base implementation for State class\nclass State(Thing):\n tag = \"\"\n\n def __init__(self, name, m):\n self.name = name\n self._trans = []\n self.model = m\n\n def trans(self, gaurd, there):\n self._trans += [o(gaurd=gaurd, there=there)]\n\n def step(self):\n for j in shuffle(self._trans):\n if j.gaurd(self):\n #print(\"now\", j.gaurd.__name__)\n self.onExit()\n j.there.onEntry()\n return j.there\n return self\n\n def onEntry(self):\n pass\n\n def onExit(self):\n pass\n\n def quit(self):\n return False\n\n# ------------------------------------------------------------------------------\nclass Machine(Thing):\n \"\"\"Maintains a set of named states.\n Creates new states if its a new name.\n Returns old states if its an old name.\"\"\"\n\n def __init__(self, name):\n self.all = {}\n self.name = name\n self.start = None\n self.functions = {}\n\n def isa(self, x):\n if isinstance(x, State):\n return x\n for k in isa(State):\n if k.tag and contains(x, k.tag):\n return k(x, self)\n return State(x, self)\n\n def state(self, x):\n self.all[x] = y = self.all[x] if x in self.all else self.isa(x)\n self.start = self.start or y\n return y\n\n def trans(self, here, gaurd, there):\n self.state(here).trans(gaurd,\n self.state(there))\n\n def run(self):\n print(self.name)\n state = self.start\n state.onEntry()\n while True:\n state = state.step()\n if state.quit():\n break\n return state.onExit()\n\n def maybe(self, s):\n return random.random() < 0.5\n\n def true(self, s):\n return True\n\n# Create with win condition specs to allow player control\nclass OuterMachine(Machine):\n\n def __init__(self, name, numPlayers):\n self.all = {}\n self.name = name\n self.start = None\n self.numPlayers = numPlayers\n self.repeat = 0\n\n\n\n# Create with specs to simulate a turn\nclass InnerMachine(Machine):\n\n def __init__(self, name, currPlayer, currRank):\n self.all = {}\n self.name = name\n self.start = None\n self.currPlayer = currPlayer\n self.currRank = currRank\n\n\n# 
---------------------------------------\n# Creates a new machine with given specifications\ndef make(machine, specification):\n specification(machine, machine.state, machine.trans)\n return machine" }, { "alpha_fraction": 0.5700517892837524, "alphanum_fraction": 0.5789725184440613, "avg_line_length": 35.07758712768555, "blob_id": "1eef11d68041f7c0fe764a7dbe359fab464824c4", "content_id": "8b73d911f008a9847bfcc966c12a6150ed7703fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12555, "license_type": "no_license", "max_line_length": 177, "num_lines": 348, "path": "/proj2/bartokServer.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\n#Boiler plate code taken from https://medium.com/swlh/lets-write-a-chat-app-in-python-f6783a9ac170\n\"\"\"Server for multithreaded (asynchronous) chat application.\"\"\"\nimport time\nfrom socket import AF_INET, socket, SOCK_STREAM, timeout\nfrom threading import Thread, Timer\nfrom stateMachineFramework import State, OuterMachine, InnerMachine, make\nfrom player import playedDeck, unplayedDeck, Player\nfrom cards import Card\n\ndef accept_incoming_connections():\n \"\"\"Sets up handling for incoming clients.\"\"\"\n global bartokGame\n bartokGame = None\n while bartokGame is None:\n try:\n client, client_address = SERVER.accept()\n except timeout:\n pass\n else:\n if bartokGame is None:\n print(\"%s:%s has connected.\" % client_address)\n client.send(bytes(\"Welcome to the Bartok server! Now type your name and press enter to join!\\n\", \"utf8\"))\n addresses[client] = client_address\n Thread(target=handle_client, args=(client,)).start()\n bartokGame.run()\n\n\ndef handle_client(client): # Takes client socket as argument.\n \"\"\"Handles a single client connection.\"\"\"\n name = client.recv(BUFSIZ).decode(\"utf8\")\n while name in clients.values():\n client.send(bytes(\"Another user has that name. Try again.\\n\", \"utf8\"))\n name = client.recv(BUFSIZ).decode(\"utf8\")\n welcome = 'Welcome %s! If you ever want to quit, type {quit} to exit. To see all users in the room, type {users}. 
To start the game, type {start}.\\n' % name\n client.send(bytes(welcome, \"utf8\"))\n msg = \"%s has joined the chat!\\n\" % name\n broadcast(bytes(msg, \"utf8\"))\n clients[client] = name\n\n while True:\n msg = client.recv(BUFSIZ).decode(\"utf8\")\n msglist = msg.split()\n if msglist[0] != \"{quit}\":\n if msglist[0] not in commands:\n broadcast(bytes(msg + \"\\n\", \"utf8\"), name+\": \")\n else:\n commands[msglist[0]](client, msglist)\n else:\n client.send(bytes(\"{quit}\", \"utf8\"))\n client.close()\n del clients[client]\n broadcast(bytes(\"%s has left the chat.\" % name, \"utf8\"))\n break\n\n\ndef broadcast(msg, prefix=\"\"): # prefix is for name identification.\n \"\"\"Broadcasts a message to all the clients.\"\"\"\n\n for sock in clients:\n sock.send(bytes(prefix, \"utf8\")+msg)\n\ndef broadcastToOthers(msg, currentPlayer, youMessage):\n for sock in clients:\n if (clients[sock] == currentPlayer):\n sock.send(youMessage)\n else:\n sock.send(msg)\n\n#----------------------------------------\n# Usable commands\n#----------------------------------------\n\ndef showHand(client, args):\n name = str(clients[client])\n msg = \"Hand: \" + players[name].getHand()\n client.send(bytes(msg + \"\\n\", \"utf8\"))\n\ndef getStatus(client, args):\n global bartokGame\n if bartokGame is None:\n client.send(bytes(\"Wait for the game to start...\\n\", \"utf8\"))\n else:\n client.send(bytes(\"You have \" + str(len(players[currentPlayer].hand.cards)) + \" cards.\\n\", \"utf8\"))\n for i in players.keys():\n if (i != currentPlayer):\n client.send(bytes(players[i].name + \" has \" + str(len(players[players[i].name].hand.cards)) + \" cards.\\n\", \"utf8\"))\n\ndef playSomeCards(client, args):\n global bartokGame\n global turnFlag\n global victory\n global skipNextTurn\n if bartokGame is None:\n client.send(bytes(\"Wait for the game to start...\\n\", \"utf8\"))\n elif clients[client] == currentPlayer:\n\n if (len(args) != 2):\n client.send(bytes(\"You must play one card.\\n\", \"utf8\"))\n else:\n myHand = players[currentPlayer].hand.cards\n myCard = Card(args[1][0], args[1][1:])\n if myCard not in myHand:\n client.send(bytes(\"You don't have this card!\\n\", \"utf8\"))\n return\n if myCard.rank == playedDeck.lastCard().rank or myCard.suit == playedDeck.lastCard().suit:\n playedDeck.addToDeck(myCard)\n players[currentPlayer].playCards([] + [myCard])\n broadcastToOthers(bytes(currentPlayer + \" played \" + str(myCard) + \"\\n\", \"utf8\"), currentPlayer, bytes(\"You played %s\\n\" % str(myCard), \"utf8\"))\n if myCard.rank == '2':\n players[nextPlayer].draw(2)\n skipNextTurn = True\n turnFlag = True\n else:\n client.send(bytes(\"You cannot play this card! Play a card from the same suit or rank.\\n\", \"utf8\"))\n return\n else:\n client.send(bytes(\"Wait for your turn...\\n\", \"utf8\"))\n\ndef drawCard(client, args):\n global bartokGame\n global turnFlag\n if bartokGame is None:\n client.send(bytes(\"Wait for the game to start...\\n\", \"utf8\"))\n elif clients[client] == currentPlayer:\n if (len(args) != 1):\n client.send(bytes(\"Draw uses no arguments! 
Type {draw}.\\n\", \"utf8\"))\n else:\n if unplayedDeck.isEmpty() and playedDeck.isEmpty():\n client.send(bytes(\"Both played and unplayed decks are empty, skip the turn.\\n\", \"utf8\"))\n else:\n players[currentPlayer].draw()\n broadcastToOthers(bytes(currentPlayer + \" drew one card!\\n\", \"utf8\"), currentPlayer, bytes(\"You drew one card!\\n\", \"utf8\"))\n showHand(client, args)\n turnFlag = True\n else:\n client.send(bytes(\"Wait for your turn...\\n\", \"utf8\"))\n\ndef players(client, args):\n msg = \"Current players: \"\n \"\"\"Show names and addresses of all players\"\"\"\n for p in clients:\n msg = msg + str(clients[p]) + \", \"\n msg = msg[0:-2]\n client.send(bytes(msg + \"\\n\", \"utf8\"))\n\ndef playBartok(client, args):\n global bartokGame\n # \n if bartokGame is None and len(clients) >= 4 and len(clients) <= 9:\n broadcast(bytes(\"\\n%s has decided to start a game of Bartok!\\n\\n\" % clients[client], \"utf8\"))\n broadcast(bytes(\"There are %d players playing!\\n\" % len(clients), \"utf8\"))\n time.sleep(.5)\n broadcast(bytes(\"To play cards on your turn, write {play} followed by the card.\\n\", \"utf8\"))\n broadcast(bytes(\"For example, write \\\"{play} H4\\\" to play the 4 of Hearts.\\n\", \"utf8\"))\n broadcast(bytes(\"In order to play a card, the card must match either the rank or the suite of the displayed card.\\n\", \"utf8\"))\n broadcast(bytes(\"If you cannot play a card, you must draw. To draw the card, write {draw}\\n\", \"utf8\"))\n broadcast(bytes(\"To see your hand, write {hand}. To see the number of cards in each players hand, write {status}. For help, write {help}.\\n\", \"utf8\"))\n time.sleep(.5)\n unplayedDeck.fillDeck()\n unplayedDeck.shuffle()\n for p in clients: #Init players\n players[clients[p]] = Player(clients[p])\n players[clients[p]].draw(5)\n # players[clients[p]].hand.cards.append(Card(\"S\", \"5\"))\n playedDeck.addToDeck(unplayedDeck.draw())\n # playedDeck.addToDeck(Card(\"S\", \"7\"))\n for p in clients:\n if clients[p] != currentPlayer:\n showHand(p, [])\n bartokGame = make(OuterMachine(\"Welcome to the game!\", len(clients)), bartokSpec)\n elif len(clients) < 4:\n client.send(bytes(\"Not enough players to start!\\n\", \"utf8\"))\n elif len(clients > 9):\n client.send(bytes(\"Too many players to start!\\n\", \"utf8\"))\n else:\n client.send(bytes(\"A game of bartok is currently occurring!\\n\", \"utf8\"))\n\ndef getHelp(client, args):\n client.send(bytes(\"To play cards on your turn, write {play} followed by the card.\\n\", \"utf8\"))\n client.send(bytes(\"For example, write \\\"{play} H4\\\" to play the 4 of Hearts.\\n\", \"utf8\"))\n client.send(bytes(\"In order to play a card, the card must match either the rank or the suite of the displayed card.\\n\", \"utf8\"))\n client.send(bytes(\"If you cannot play a card, you must draw. To draw the card, write {draw}\\n\", \"utf8\"))\n client.send(bytes(\"To see your hand, write {hand}. To see the number of cards in each players hand, write {status}. 
For help, write {help}.\\n\", \"utf8\"))\n\n#----------------------------------------\n\ncommands = { \"{users}\" : players,\n \"{start}\" : playBartok,\n \"{play}\" : playSomeCards,\n \"{hand}\" : showHand,\n \"{status}\" : getStatus,\n \"{draw}\": drawCard,\n \"{help}\" : getHelp }\nclients = {}\naddresses = {}\nplayers = {}\nbartokGame = None\nturnFlag = False\nvictory = False\ncurrentPlayer = None\nnextPlayer = None\nskipNextTurn = False\n\n\nHOST = ''\nPORT = 33000\nBUFSIZ = 1024\nADDR = (HOST, PORT)\n\nSERVER = socket(AF_INET, SOCK_STREAM)\nSERVER.bind(ADDR)\n\n#----------------------------------------\n# Bartok game specifications\n#----------------------------------------\ndef bartokSpec(m, s, t):\n\n def timeToExit(i):\n return victory\n def repeatTurns(i):\n return not victory\n\n m.leave = timeToExit\n m.repeat = repeatTurns\n player = s(\"player*\")\n exitGame = s(\"exit.\")\n t(\"start\", m.true, player)\n t(player, m.repeat, player)\n t(player, m.leave, exitGame)\n\nclass Turn(State):\n global victory\n victory = False\n\n tag = \"*\"\n currPlayer = 0\n currSuit = 1\n\n def onEntry(self):\n global skipNextTurn\n global currentPlayer\n global nextPlayer\n p = [clients[k] for k in clients]\n currentPlayer = p[self.currPlayer]\n nextPlayer = p[self.currPlayer + 1 if self.currPlayer < self.model.numPlayers - 1 else 0]\n \n broadcast(bytes(\"\\n<---------------------------------------->\\n\", \"utf8\"))\n if skipNextTurn:\n skipNextTurn = False\n broadcastToOthers(bytes(currentPlayer + \" drew two cards and skips a turn!\\n\", \"utf8\"), currentPlayer, bytes(\"You draw two cards and skip a turn!\\n\", \"utf8\"))\n else:\n name = str(currentPlayer + \" is up!\\n\")\n broadcastToOthers(bytes(name, \"utf8\"), currentPlayer, bytes(\"It's your turn!\\n\", \"utf8\"))\n make(InnerMachine(name,self.currPlayer,self.currSuit),bartokTurnSpec).run()\n\n def onExit(self):\n if self.currPlayer < self.model.numPlayers - 1:\n self.currPlayer += 1\n else:\n self.currPlayer = 0\n if self.currSuit < 13:\n self.currSuit += 1\n else:\n self.currSuit = 1\n\nclass GameOver(State):\n tag = \".\"\n\n def quit(self):\n return True\n\n def onExit(self):\n global players\n global bartokGame\n global turnFlag\n global victory\n global currentPlayer\n global nextPlayer\n players = {}\n bartokGame = None\n turnFlag = False\n victory = False\n currentPlayer = None\n nextPlayer = None\n playedDeck.resetDeck()\n unplayedDeck.resetDeck()\n broadcast(bytes(\"Game over!\\n\", \"utf8\"))\n return False\n\n#----------------------------------------\n# Bartok turn specifications\n#----------------------------------------\ndef bartokTurnSpec(m, s, t):\n def waitForCards(i):\n global turnFlag\n global victory\n m.currCard = playedDeck.lastCard()\n broadcast(bytes(\"Current card is %s.\\n\" % str(m.currCard), \"utf8\"))\n p = [k for k in clients]\n showHand(p[m.currPlayer], [])\n turnFlag = False\n while not turnFlag:\n pass\n if not players[currentPlayer].hand.cards:\n broadcastToOthers(bytes(\"%s emptied their hand! 
They Win the Game.\\n\" % currentPlayer, \"utf8\"), currentPlayer, bytes(\"CONGRATULATIONS, YOU WIN THE GAME!\\n\", \"utf8\"))\n victory = True\n return False\n return True\n \n m.playcards = waitForCards\n play = s(\"play/\")\n nextTurn = s(\"exit=\")\n t(\"start\", m.true, play)\n t(play, m.playcards, nextTurn)\n\nclass PlayCards(State):\n tag = \"/\"\n\n def quit(self):\n return victory\n\n def onExit(self):\n return True\n\nclass NextTurn(State):\n tag = \"=\"\n\n def quit(self):\n return True\n\n def onEntry(self):\n print(\"Entered into next turn state. Should move to next turn.\")\n\n def onExit(self):\n return False\n\n#----------------------------------------\n\nif __name__ == \"__main__\":\n SERVER.settimeout(0.2)\n SERVER.listen(5)\n print(\"Waiting for connection...\")\n ACCEPT_THREAD = Thread(target=accept_incoming_connections)\n ACCEPT_THREAD.start()\n ACCEPT_THREAD.join()\n SERVER.close()\n" }, { "alpha_fraction": 0.5578778386116028, "alphanum_fraction": 0.5621650815010071, "avg_line_length": 37.875, "blob_id": "a79b0b403be9ca13fef8b9c814b30fd297ae12d6", "content_id": "845038f8711fc27031586706713f05834b45bd2e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5598, "license_type": "no_license", "max_line_length": 122, "num_lines": 144, "path": "/proj3/Bartok.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "from Game import Game\nfrom Cards import Deck, Card, NotInStackException\nfrom testFramework import testFramework\n\n# Implementation of Bartok as a Game\n\nclass Bartok(Game):\n\n #---------------------------------------------------\n # Initializing attributes, and adding action methods\n #---------------------------------------------------\n def __init__(self, players):\n super(Bartok, self).__init__(players)\n self.playFlag = False\n self.currentPlayer = None\n self.skipNextTurn = False\n # Discard pile used to pick up with cheat moves\n self.played = Deck()\n # Dictionary to define possible actions to take\n self.actions = { \"{players}\" : self.getPlayers,\n \"{start}\" : self.playBartok,\n \"{play}\" : self.playCards,\n \"{hand}\" : self.showHand,\n \"{draw}\": self.drawCard,\n \"{help}\" : self.getHelp }\n\n #---------------------------------------------------\n # Defining game actions\n #---------------------------------------------------\n def getHelp(self, player, msglist):\n player.tell(\"To play cards on your turn, write {play} followed by the cards. \")\n player.tell(\"For example, write \\\"{play} H4 S4\\\" to play the 4 of Hearts and the 4 of Spades. \")\n player.tell(\"In order to play a card, the card must match either the rank or the suite of the displayed card. \")\n player.tell(\"If you cannot play a card, you must draw. To draw the card, write {draw}. \")\n player.tell(\"To see your hand, write {hand}. 
For help, write {help}.\")\n\n def showHand(self, player, msglist):\n player.tell(\"The cards in your hand:\")\n player.tell(player.getHand())\n\n def playBartok(self, player, msglist):\n if len(self.players) < 2 or len(self.players) >= 9:\n player.tell(\"Not enough players to start the game...\")\n else:\n self.playing = True\n\n def getPlayers(self, player, msglist):\n player.tell(\"Current players: \")\n msg = \"\"\n for p in self.players:\n msg += (p.name + \" --- hand size: \" + str(p.getHandSize()) + \"\\n\")\n player.tell(msg[:-1])\n\n def playCards(self, player, msglist):\n if not self.playing:\n player.tell(\"Wait for the game to start...\")\n elif player != self.currentPlayer:\n player.tell(\"Wait for your turn...\")\n elif (len(msglist) != 2):\n player.tell(\"You must play one card.\")\n else:\n cards = msglist[1:]\n playedCards = []\n for card in cards:\n card = Card(str(card[0]),str(card[1:]))\n playedCards.append(card)\n try:\n if playedCards[0].rank == self.played.lastCard().rank or playedCards[0].suit == self.played.lastCard().suit:\n player.playFromHand(playedCards, self.played)\n self.broadcast(str(self.currentPlayer) + \" played \" + str(playedCards[0]))\n self.showGUIHand(self.currentPlayer)\n # if playedCards[0].rank == '2':\n # self.nextPlayer.addToHand(self.deck.draw())\n # self.nextPlayer.addToHand(self.deck.draw())\n # self.skipNextTurn = True\n self.playFlag = True\n else:\n player.tell(\"You cannot play this card! Play a card from the same suit or rank.\")\n return\n self.broadcast(\"They currently hold \" + str(player.hand.size()) + \" cards.\")\n\n except NotInStackException:\n player.tell(\"You can only play cards that are in your hand.\")\n\n def drawCard(self, player, msglist):\n if not self.playing:\n player.tell(\"Wait for the game to start...\")\n elif player != self.currentPlayer:\n player.tell(\"Wait for your turn...\")\n else:\n if (len(msglist) != 1):\n player.tell(\"Draw uses no arguments! 
Type {draw}\")\n else:\n if self.deck.isEmpty() and self.played.isEmpty():\n player.tell(\"Both played and unplayed decks are empty, skip the turn.\")\n else:\n self.currentPlayer.addToHand(self.deck.draw())\n self.showGUIHand(self.currentPlayer)\n self.broadcast(str(self.currentPlayer) + \" drew one card!\")\n self.playFlag = True\n\n #---------------------------------------------------\n # Defining game rules\n #---------------------------------------------------\n\n def pregameActions(self):\n # Set to players\n self.nextPlayer = iter(self.nextPlayerIterFunc())\n\n # Make game announcements\n self.broadcast(\"The Bartok Game is starting!\")\n self.broadcast(\"There are %d players playing!\" % len(self.players))\n self.wait(1)\n for p in self.players:\n self.getHelp(p, None)\n self.wait(2)\n self.deck.shuffle()\n self.deck.dealCards(self.players, 5)\n self.played.addToDeck(self.deck.draw())\n for p in self.players:\n p.tell(\"//{hand}//\" + p.getHand())\n return True\n\n def preplayGuards(self):\n self.broadcast(\"It is %s\\'s turn!\" % self.currentPlayer.name)\n self.wait(.25)\n self.broadcast(\"Current card is \" + str(self.played.lastCard()))\n self.wait(1)\n self.showHand(self.currentPlayer, None)\n return True\n\n def doPlay(self):\n while not self.playFlag:\n pass\n self.playFlag = False\n return True\n\n def checkForVictory(self):\n return self.currentPlayer.hand.isEmpty()\n\n def endGame(self):\n self.wait(1)\n self.broadcast(str(self.currentPlayer) + \" has emptied their hand, and wins!\")\n self.broadcast(\"Thanks for playing!\")\n" }, { "alpha_fraction": 0.4836806058883667, "alphanum_fraction": 0.48552462458610535, "avg_line_length": 27.098445892333984, "blob_id": "f5958256109148254677d548b87d006e7f57e43d", "content_id": "8daea5f9fe6f1d43f661fc937089f535bffdcf70", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5423, "license_type": "no_license", "max_line_length": 113, "num_lines": 193, "path": "/proj3/Game.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "from Cards import Deck\nfrom stateMachineFramework import State, InnerMachine, OuterMachine, make\nfrom testFramework import testFramework\nfrom time import sleep\n\n# Inheritance-based implementation based on Group F (https://github.com/ejgillia/plm18_f)\nclass Game(object):\n\n #----------------------------------------\n # Constructs a Game object\n #----------------------------------------\n def __init__(self, players):\n self.deck = Deck()\n self.deck.fillDeck()\n self.playing = False\n self.actions = {}\n self.players = players\n self.nextPlayer = iter(self.nextPlayerIterFunc())\n self.currentPlayer = next(self.nextPlayer)\n\n #----------------------------------------\n # Parses whether a player input is an action.\n #----------------------------------------\n def parseAction(self, player, msg):\n msglist = msg.split()\n if msglist[0] in self.actions:\n self.actions[msglist[0]](player, msglist)\n else:\n self.broadcast(msg, player.name+\": \")\n\n #----------------------------------------\n # Broadcast a message to all the clients\n #----------------------------------------\n def broadcast(self, msg, prefix=\"\"):\n for p in self.players:\n p.tell(prefix + msg)\n\n #----------------------------------------\n # Show a client their hand from the GUI\n #----------------------------------------\n def showGUIHand(self, player):\n player.tell(\"//{hand}//\" + player.getHand())\n self.wait(.25)\n\n 
#----------------------------------------\n # Sleep for given time\n #----------------------------------------\n def wait(self, sleepTime):\n sleep(sleepTime)\n\n #----------------------------------------\n # Iterator to return next player\n #----------------------------------------\n def nextPlayerIterFunc(self):\n currPlayer = 0\n while True:\n yield self.players[currPlayer]\n if (currPlayer < (len(self.players) - 1)):\n currPlayer = currPlayer + 1\n else:\n currPlayer = 0\n\n\n #----------------------------------------\n # Phases of a turn\n #----------------------------------------\n class StartPhase(State): #Begin the turn\n tag = \"!\"\n class PreplayPhase(State): #Things that can occur before a play\n tag = \"@\"\n class PlayPhase(State): #Things that occur to constitute a play\n tag = \"#\"\n class PostplayPhase(State): #Things that can occur after a play\n tag = \"$\"\n class EndPhase(State): #End the turn\n tag = \"%\"\n def quit(self):\n return True\n def onExit(self):\n return self.model.game.checkForVictory()\n\n #--------------------------------------------\n # State machine to abstractly define a turn.\n # m is the machine\n # s is the list of states\n # t is the list of transitions\n #--------------------------------------------\n def turnSpec(self, m, s, t):\n start = s(\"!\")\n preplay = s(\"@\")\n play = s(\"#\")\n postplay = s(\"$\")\n end = s(\"%\")\n t(\"start\", m.true, start)\n t(start, self.startGuards, preplay)\n t(preplay, self.preplayGuards, play)\n t(play, self.doPlay, postplay)\n t(postplay, self.postplayGuards, end)\n\n #-------------------------------------------\n # Methods to be extended by implementation.\n # Use to define rules of the game.\n #-------------------------------------------\n def startGuards(self):\n return True\n\n def preplayGuards(self):\n return True\n\n def doPlay(self):\n return True\n\n def postplayGuards(self):\n return True\n\n def checkForVictory(self):\n return True\n\n\n #----------------------------------------\n # Phases of the game\n #----------------------------------------\n class GameStart(State):\n tag = \"^\"\n class Turn(State):\n tag = \"&\"\n class GameOver(State):\n tag = \"*\"\n def quit(self):\n return True\n def onExit(self):\n self.model.game.endGame()\n self.model.game.playing = False\n\n #--------------------------------------------\n # State machine to abstractly define the game.\n # m is the machine\n # s is the list of states\n # t is the list of transitions\n #--------------------------------------------\n def gameSpec(self, m, s, t):\n start = s(\"^\")\n turn = s(\"&\")\n end = s(\"*\")\n t(\"start\", m.true, start)\n t(start, self.pregameActions, turn)\n t(turn, self.runTurn, end)\n t(turn, m.true, turn)\n # TODO Make this work\n\n #-------------------------------------------\n # Methods to be extended by implementation.\n # Use to define what happens before and after\n # the game.\n #-------------------------------------------\n def pregameActions(self):\n print(\"Game started\")\n return True\n\n def endGame(self):\n print(\"Game completed\")\n\n #--------------------------------------------\n # Methods to control the state machines.\n # Can be extended but shouldn't be.\n #--------------------------------------------\n\n def runTurn(self):\n self.currentPlayer = next(self.nextPlayer)\n return make(InnerMachine(self.currentPlayer.name + \"\\'s turn.\",self.currentPlayer,self),self.turnSpec).run()\n\n\n def runGame(self):\n self.playing = True\n make(OuterMachine(\"Welcome to the game!\", 
len(self.players), self),self.gameSpec).run()\n\n\"\"\"\n#----------------------------------------\n# Tests\n#----------------------------------------\n\n@testFramework\ndef tryGame():\n game = Game(None)\n game.runGame()\n\n\n\n# ---------------------------------------\nif __name__ == \"__main__\":\n rseed()\n testFramework()\n\"\"\"\n" }, { "alpha_fraction": 0.5564059019088745, "alphanum_fraction": 0.5666099786758423, "avg_line_length": 31.07272720336914, "blob_id": "a61393fc48321684bdd04a0be792a98d62c898cf", "content_id": "7e06336e24e29d815931f89a3317a77d6c4cee1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3528, "license_type": "no_license", "max_line_length": 161, "num_lines": 110, "path": "/proj3/server.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n#Boiler plate code taken from https://medium.com/swlh/lets-write-a-chat-app-in-python-f6783a9ac170\n\"\"\"Server for multithreaded (asynchronous) chat application.\"\"\"\nfrom socket import AF_INET, socket, SOCK_STREAM, timeout\nfrom threading import Thread, Timer\nfrom player import Player\n\n#----------------------------------------\n# Import a class of the game\n#----------------------------------------\ngameName = input(\"Name of the game? \")\nGameClass = \"\"\nattempts = 20\n\nwhile attempts > 0:\n try:\n _temp = __import__(gameName, globals(), locals(), [gameName])\n GameClass = getattr(_temp, gameName.title())\n break\n except:\n print('Use a valid game name without .py')\n gameName = input(\"Name of the game? \")\n attempts -= 1\n continue\n\n#Exit if attempted 20 times so we don't have to use a whie True\nif (attempts == 0 and GameClass == \"\"):\n print('Try again.')\n quit()\n\n#----------------------------------------\n# Server side communication\n#----------------------------------------\ngame = None\n\ndef accept_incoming_connections():\n \"\"\"Sets up handling for incoming clients.\"\"\"\n global game, gameName\n while True:\n try:\n client, client_address = SERVER.accept()\n if game is None or game.playing is False:\n print(\"%s:%s has connected.\" % client_address)\n client.send(bytes(gameName, \"utf8\"))\n client.send(bytes(\"Welcome to the \" + gameName + \" server! Now type your name and press enter to join!\", \"utf8\"))\n Thread(target=handle_client, args=(client,)).start()\n else:\n client.send(bytes(\"A game is in session. Please come back later...\", \"utf8\"))\n except timeout:\n pass\n\ndef wait_for_game():\n global game\n while game is None or game.playing is False:\n pass\n game.runGame()\n\ndef handle_client(client): # Takes client socket as argument.\n global game\n \"\"\"Handles a single client connection.\"\"\"\n name = client.recv(BUFSIZ).decode(\"utf8\")\n\n if game is None:\n p = Player(name, client)\n game = GameClass([p])\n else:\n while name in [p.name for p in game.players]:\n client.send(bytes(\"Another user has that name. Try again.\", \"utf8\"))\n name = client.recv(BUFSIZ).decode(\"utf8\")\n p = Player(name, client)\n game.players.append(p)\n welcome = 'Welcome %s! If you ever want to quit, type {quit} to exit. To see all people in the room, type {players}. To start the game, type {start}.' 
% name\n p.tell(welcome)\n\n msg = \"%s has joined the chat!\" % name\n game.broadcast(msg)\n\n while True:\n msg = client.recv(BUFSIZ).decode(\"utf8\")\n if msg != \"{quit}\":\n game.parseAction(p, msg)\n else:\n client.send(bytes(\"{quit}\", \"utf8\"))\n client.close()\n game.broadcast(\"%s has left the chat.\" % name)\n break\n\n#----------------------------------------\nHOST = ''\nPORT = 33000\nBUFSIZ = 1024\nADDR = (HOST, PORT)\n\nSERVER = socket(AF_INET, SOCK_STREAM)\nSERVER.bind(ADDR)\n\n#----------------------------------------\n\nif __name__ == \"__main__\":\n game = None\n SERVER.settimeout(0.2)\n SERVER.listen(5)\n print(\"Waiting for connection...\")\n ACCEPT_THREAD = Thread(target=accept_incoming_connections)\n ACCEPT_THREAD.start()\n GAME_THREAD = Thread(target=wait_for_game)\n GAME_THREAD.start()\n GAME_THREAD.join()\n ACCEPT_THREAD.join()\n SERVER.close()\n" }, { "alpha_fraction": 0.5956616997718811, "alphanum_fraction": 0.6151596307754517, "avg_line_length": 32.08871078491211, "blob_id": "75fde32d937ec347e80bc66680b0fb72980c4cf1", "content_id": "8861fe520a27d87e0938f0b72041effa7d4d70cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4103, "license_type": "no_license", "max_line_length": 128, "num_lines": 124, "path": "/proj3/client.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\n#Boiler plate code taken from https://medium.com/swlh/lets-write-a-chat-app-in-python-f6783a9ac170\n\"\"\"Script for tkinter GUI chat client.\"\"\"\nfrom socket import AF_INET, socket, SOCK_STREAM\nfrom threading import Thread\nimport tkinter\n\nnameSet = False\ntop = tkinter.Tk()\ntop.title(\"Card Game\")\ngameName = False\nphotos = []\n\ndef receive():\n \"\"\"Handles receiving of messages.\"\"\"\n global nameSet, gameName, labels, photos\n\n while True:\n try:\n if not gameName:\n gameName = client_socket.recv(BUFSIZ).decode(\"utf8\")\n top.title(gameName)\n continue\n msg = client_socket.recv(BUFSIZ).decode(\"utf8\")\n if \"Another user has that name. 
Try again.\" in msg:\n nameSet = False\n top.title(gameName)\n if \"//{hand}//\" in msg:\n cards = []\n cards = msg[11:len(msg) - 1].split(', ')\n hand_canvas.delete(\"all\")\n photos = [None]*len(cards)\n for i in range(len(cards)):\n try:\n photos[i] = tkinter.PhotoImage(file = \".\\\\pictures\\\\\" + cards[i] + \".png\", width = \"69\", height = \"106\")\n except tkinter.TclError:\n continue\n if 72*(i+1) > 600:\n hand_canvas.config(scrollregion=(0,0,72*(i+1),115))\n else:\n hand_canvas.config(scrollregion=(0,0,600,115))\n hand_canvas.create_image(72*i, 3, image=photos[i], anchor=tkinter.NW)\n continue\n msg_list.config(state=tkinter.NORMAL)\n msg_list.insert(tkinter.END, msg + \"\\n\")\n msg_list.see(tkinter.END)\n msg_list.config(state=tkinter.DISABLED)\n except OSError: # Possibly client has left the chat.\n break\n\n\ndef send(event=None): # event is passed by binders.\n \"\"\"Handles sending of messages.\"\"\"\n global nameSet\n global gameName\n msg = my_msg.get()\n if \"//{hand}//\" in msg:\n my_msg.set(\"\")\n return\n if not nameSet:\n top.title(gameName + \": \" + msg)\n nameSet = True\n my_msg.set(\"\") # Clears input field.\n client_socket.send(bytes(msg, \"utf8\"))\n if msg == \"{quit}\":\n client_socket.close()\n top.quit()\n\n\ndef on_closing(event=None):\n \"\"\"This function is to be called when the window is closed.\"\"\"\n my_msg.set(\"{quit}\")\n send()\n\nmessages_frame = tkinter.Frame(top)\nhand_frame = tkinter.Frame(top)\n\nhand_canvas = tkinter.Canvas(hand_frame, height = 115, width= 600,scrollregion=(0,0,600,115))\nh_bar = tkinter.Scrollbar(hand_frame,orient=tkinter.HORIZONTAL)\nh_bar.config(command=hand_canvas.xview)\nhand_canvas.config(xscrollcommand=h_bar.set)\n\n\nmy_msg = tkinter.StringVar() # For the messages to be sent.\nmy_msg.set(\"Your name here...\")\nscrollbar = tkinter.Scrollbar(messages_frame) # To navigate through past messages.\n# Following will contain the messages.\nmsg_list = tkinter.Text(messages_frame, height=20, width=75, yscrollcommand=scrollbar.set, wrap=tkinter.WORD)\nscrollbar.pack(side=tkinter.RIGHT, fill=tkinter.Y)\nmsg_list.pack(side=tkinter.LEFT, fill=tkinter.BOTH)\nmsg_list.pack()\nmsg_list.config(state=tkinter.DISABLED)\nh_bar.pack(side=tkinter.TOP, fill=tkinter.X)\nhand_canvas.pack(side=tkinter.TOP)\nhand_frame.pack()\nmessages_frame.pack()\n\nentry_field = tkinter.Entry(top, textvariable=my_msg, width=50)\nentry_field.bind(\"<Return>\", send)\nentry_field.bind(\"<FocusIn>\", lambda args: entry_field.delete('0', 'end'))\nentry_field.pack()\nsend_button = tkinter.Button(top, text=\"Send\", command=send)\nsend_button.pack()\n\ntop.protocol(\"WM_DELETE_WINDOW\", on_closing)\n\n#----Now comes the sockets part----\nHOST = input('Enter host: ')\nPORT = 33000\nif not PORT:\n PORT = 33000\nelse:\n PORT = int(PORT)\n\nBUFSIZ = 1024\nADDR = (HOST, PORT)\n\nclient_socket = socket(AF_INET, SOCK_STREAM)\nclient_socket.connect(ADDR)\n\nreceive_thread = Thread(target=receive)\nreceive_thread.start()\ntkinter.mainloop() # Starts GUI execution.\n" }, { "alpha_fraction": 0.4162895977497101, "alphanum_fraction": 0.43589743971824646, "avg_line_length": 26.625, "blob_id": "aec4baa6e3835688370f13fc3ec8a42978855dfa", "content_id": "5f7adb54dbeb090add31c7ceac6e1c4db79f9832", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 663, "license_type": "no_license", "max_line_length": 60, "num_lines": 24, "path": "/proj3/testFramework.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", 
"text": "# vim: set filetype=python ts=2 sw=2 sts=2 expandtab:\n\nimport traceback, time, re\n\ndef about(f):\n print(\"\\n-----| %s |-----------------\" % f.__name__)\n if f.__doc__:\n print(\"# \" + re.sub(r'\\n[ \\t]*', \"\\n# \", f.__doc__))\n\nTRY = FAIL = 0\n\ndef testFramework(f=None):\n global TRY, FAIL\n if f:\n try:\n TRY += 1; about(f); f(); print(\"# pass\");\n except:\n FAIL += 1; print(traceback.format_exc());\n return f\n else:\n print(\"\\n# %s TRY= %s ,FAIL= %s ,%%PASS= %s\" % (\n time.strftime(\"%d/%m/%Y, %H:%M:%S,\"),\n TRY, FAIL,\n int(round((TRY - FAIL) * 100 / (TRY + 0.001)))))\n" }, { "alpha_fraction": 0.5817217826843262, "alphanum_fraction": 0.5859326124191284, "avg_line_length": 35.431819915771484, "blob_id": "9bfb3aefe971a191f56c568c0bf435bee8ff76a5", "content_id": "71944dc052f8601c713900a0f1397d606b3d7c40", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6412, "license_type": "no_license", "max_line_length": 133, "num_lines": 176, "path": "/proj3/Cheat.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "from Game import Game\nfrom time import time\nfrom Cards import Deck, Card, CardStack, NotInStackException, ranks\nfrom testFramework import testFramework\nfrom player import Player\n\n# Implementation of Cheat as a Game\n\nclass Cheat(Game):\n\n def __init__(self, players):\n super(Cheat, self).__init__(players)\n self.playFlag = False\n self.cheatcaller = None\n self.nextRank = iter(self.nextRankIterFunc())\n self.currentPlayer = None\n self.currentRank = None\n # Discard pile used to pick up with cheat moves\n self.discard = Deck()\n # Buffer deck to hold cards while cheaters may cheat\n self.bufferDeck = CardStack()\n # Dictionary to define possible actions to take\n self.actions = { \"{players}\" : self.getPlayers,\n \"{start}\" : self.playCheat,\n \"{play}\" : self.playCards,\n \"{hand}\" : self.showHand,\n \"{cheat}\" : self.callCheat,\n \"{help}\" : self.getHelp }\n\n #---------------------------------------------------\n # Current rank iterator\n #---------------------------------------------------\n def nextRankIterFunc(self):\n currRank = 0\n while True:\n yield ranks()[currRank]\n if (currRank < (len(ranks()) - 1)):\n currRank += 1\n else:\n currRank = 0\n\n #---------------------------------------------------\n # Defining game actions\n #---------------------------------------------------\n def getHelp(self, player, msglist):\n player.tell(\"To play cards on your turn, write {play} followed by the cards. \")\n player.tell(\"For example, write \\\"{play} H4 S4\\\" to play the 4 of Hearts and the 4 of Spades. \")\n player.tell(\"If you think a player played cards that aren't of the current rank, announce {cheat}. \")\n player.tell(\"If they were lying, they have to pick up all the played cards... but if the weren't... you do! \")\n player.tell(\"To see your hand, write {hand}. 
For help, write {help}.\")\n\n def showHand(self, player, msglist):\n player.tell(\"The cards in your hand:\")\n player.tell(player.getHand())\n\n def playCheat(self, player, msglist):\n if len(self.players) < 3:\n player.tell(\"Not enough players to start the game...\")\n else:\n self.playing = True\n\n def getPlayers(self, player, msglist):\n player.tell(\"Current players: \")\n msg = \"\"\n for p in self.players:\n msg += (p.name + \" --- hand size: \" + str(p.getHandSize()) + \"\\n\")\n player.tell(msg[:-1])\n\n def playCards(self, player, msglist):\n if not self.playing:\n player.tell(\"Wait for the game to start...\")\n elif player != self.currentPlayer:\n player.tell(\"Wait for your turn...\")\n elif len(msglist) == 1:\n player.tell(\"You have to play a card.\")\n else:\n cards = msglist[1:]\n playedCards = []\n for card in cards:\n card = Card(str(card[0]),str(card[1:]))\n playedCards.append(card)\n try:\n player.playFromHand(playedCards, self.bufferDeck)\n self.showGUIHand(self.currentPlayer)\n self.broadcast(player.name + \" has played \" + str(self.bufferDeck.size()) + \" card(s).\")\n self.broadcast(\"They currently hold \" + str(player.hand.size()) + \" cards.\")\n self.playFlag = True\n except NotInStackException:\n player.tell(\"You can only play cards that are in your hand.\")\n\n\n def callCheat(self, player, msglist):\n if not self.playing:\n player.tell(\"Wait for the game to start...\")\n elif player == self.currentPlayer:\n player.tell(\"You can\\'t call Cheat on yourself...\")\n else:\n self.cheatCaller = player\n\n #---------------------------------------------------\n # Defining game rules\n #---------------------------------------------------\n\n def pregameActions(self):\n # Set to players\n self.nextPlayer = iter(self.nextPlayerIterFunc())\n # Make game announcements\n self.broadcast(\"The Cheat Game is starting!\")\n self.broadcast(\"There are %d players playing!\" % len(self.players))\n self.wait(1)\n for p in self.players:\n self.getHelp(p, None)\n self.wait(2)\n self.deck.shuffle()\n while not self.deck.isEmpty():\n self.currentPlayer = next(self.nextPlayer)\n self.currentPlayer.addToHand(self.deck.draw())\n for p in self.players:\n self.showGUIHand(p)\n return True\n\n def preplayGuards(self):\n self.currentRank = next(self.nextRank)\n self.broadcast(\"It is %s\\'s turn!\" % self.currentPlayer.name)\n self.wait(.25)\n self.broadcast(\"The rank this turn is \" + self.currentRank + \".\")\n self.wait(1)\n self.showHand(self.currentPlayer, None)\n return True\n\n def doPlay(self):\n while not self.playFlag:\n pass\n self.playFlag = False\n return True\n\n def postplayGuards(self):\n cheating = False\n for c in self.bufferDeck.cards:\n self.discard += c\n if c.rank != self.currentRank:\n cheating = True\n self.bufferDeck.empty()\n self.wait(1)\n self.broadcast(\"You have 10 seconds to announce {cheat}.\")\n self.cheatCaller = None\n t_end = time() + 10\n while self.cheatCaller is None and time() < t_end:\n pass\n if self.cheatCaller is not None:\n self.broadcast(self.cheatCaller.name + \" has called Cheat!\")\n self.wait(2)\n if cheating:\n self.broadcast(\"%s was cheating, and has to pick up the stack! \" % self.currentPlayer.name)\n while not self.discard.isEmpty():\n self.currentPlayer.addToHand(self.discard.draw())\n self.showGUIHand(self.currentPlayer)\n else:\n self.broadcast(\"%s wasn't cheating... 
%s has to pick up the stack...\" % (self.currentPlayer.name, self.cheatCaller.name))\n while not self.discard.isEmpty():\n self.cheatCaller.addToHand(self.discard.draw())\n self.showGUIHand(self.cheatCaller)\n else:\n self.broadcast(\"Time's up!\")\n if not self.discard.isEmpty():\n self.wait(.5)\n self.broadcast(\"The discard pile has %d cards in it.\" % self.discard.size())\n return True\n\n def checkForVictory(self):\n return self.currentPlayer.hand.isEmpty()\n\n def endGame(self):\n self.wait(1)\n self.broadcast(self.currentPlayer.name + \" has emptied their hand, and wins!\")\n self.broadcast(\"Thanks for playing!\")\n" }, { "alpha_fraction": 0.4448336362838745, "alphanum_fraction": 0.4658493995666504, "avg_line_length": 20.961538314819336, "blob_id": "45115dc7edc61adcb940257f3684995544aed765", "content_id": "ac6068298a43bc31e28eed7e3d8fa17e7bd7e8db", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 571, "license_type": "no_license", "max_line_length": 45, "num_lines": 26, "path": "/proj3/gametree.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "from Bartok import canPlay\n\nclass Node(object):\n def __init__(self, root, depth):\n self.value = root\n self.size = 0\n self.children = []\n self.depth = depth;\n if (self.depth == 1):\n for (card in myDeck):\n if (Bartok.canPlay())\n \n def addChild(self, child):\n self.children.append(Node(child))\n self.size += 1\n\ngameRepr = {\n currCard = 'S3'\n players = [\n {'hand': 5},\n {'hand': 5},\n {'hand': 5}\n ],\n myDeck = ['DA', 'S4', 'S2', 'H10', 'DK'],\n turn = 1\n}\n" }, { "alpha_fraction": 0.5031712651252747, "alphanum_fraction": 0.5038759708404541, "avg_line_length": 27.959182739257812, "blob_id": "e5480afd359e065265f3bbea4388afcdd9bf308f", "content_id": "cefcb0b5366645e3b23b0f70f693348e9d6f02b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1419, "license_type": "no_license", "max_line_length": 82, "num_lines": 49, "path": "/proj3/player.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "from Cards import CardStack, NotInStackException\n\n#---------------------------------------------------\n# Defining game actions\n#---------------------------------------------------\nclass Player(object):\n \"\"\"Represents a player object\"\"\"\n def __init__(self, name, conn):\n self.name = name\n self.hand = CardStack()\n self.connection = conn\n\n def __eq__(self, other):\n if isinstance(other, self.__class__):\n return self.__dict__ == other.__dict__\n else:\n return False\n\n def __repr__(self):\n return self.name\n\n def getHand(self):\n return str(self.hand.cards)\n\n def getHandSize(self):\n return len(self.hand.cards)\n\n def addToHand(self, cards):\n if isinstance(cards, list):\n for card in cards:\n self.hand += card\n else:\n self.hand += cards\n\n def playFromHand(self, cards, stack):\n if isinstance(cards, list):\n for card in cards:\n if card not in self.hand.cards:\n raise NotInStackException\n #Will throw NotInStackException if a card in cards is not in self.hand\n for card in cards:\n self.hand -= card\n stack += card\n else:\n self.hand += cards\n stack += cards\n\n def tell(self, message):\n self.connection.send(bytes(message, \"utf8\"))\n" }, { "alpha_fraction": 0.4224507212638855, "alphanum_fraction": 0.43144816160202026, "avg_line_length": 25.827587127685547, "blob_id": "634590ab393a19a4512a4371a196755a83e3f154", "content_id": 
"89757270dc333f22ece5e92fa62c3f52cbb91be4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2334, "license_type": "no_license", "max_line_length": 50, "num_lines": 87, "path": "/proj2/Cards.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "# For use in setting up various card games.\nfrom random import shuffle\n\nclass Card(object):\n \"\"\"Represents a card object.\"\"\"\n\n def __init__(self, suit, rank):\n self.suit = suit\n self.rank = rank\n def __repr__(self):\n return str(self.suit) + str(self.rank)\n def __str__(self):\n return str(self.suit) + str(self.rank)\n def __eq__(self, other):\n if isinstance(other, self.__class__):\n return self.__dict__ == other.__dict__\n else:\n return False\n\nclass CardStack(object):\n \"\"\"Represents any such stack of cards.\"\"\"\n\n def __init__(self):\n self.cards = []\n self.isVisible = True\n def __str__(self):\n str = \"\"\n for card in self.cards:\n str = str + card + \", \"\n return str[0:-2]\n def __repr__(self):\n str = \"\"\n for card in self.cards:\n str = str + card + \", \"\n return str[0:-2]\n\nclass Deck(CardStack):\n \"\"\"Represents a deck of cards.\"\"\"\n\n def isEmpty(self):\n return len(self.cards) == 0\n\n def shuffle(self):\n shuffle(self.cards)\n\n def lastCard(self):\n return self.cards[len(self.cards) - 1]\n\n def changeVisibility(self):\n if self.isVisible:\n self.isVisible = False\n else :\n self.isVisible = True\n\n def draw(self):\n return self.cards.pop()\n\n def addToDeck(self, c):\n self.cards.append(c)\n\n def fillDeck(self):\n if not self.cards:\n for suit in range(4):\n for rank in range(1, 14):\n if suit == 0:\n suit = \"H\"\n if suit == 1:\n suit = \"D\"\n if suit == 2:\n suit = \"C\"\n if suit == 3:\n suit = \"S\"\n if rank == 1:\n rank = \"A\"\n elif rank == 11:\n rank = \"J\"\n elif rank == 12:\n rank = \"Q\"\n elif rank == 13:\n rank = \"K\"\n else:\n rank = str(rank)\n card = Card(suit, rank)\n self.cards.append(card)\n\n def resetDeck(self):\n self.cards = []\n" }, { "alpha_fraction": 0.4557377099990845, "alphanum_fraction": 0.46393442153930664, "avg_line_length": 24.847457885742188, "blob_id": "2c22ecfa4afc14c8691d92a41e6586a2028d3d8e", "content_id": "f72b3934ee13f49401c3d733581e31f28e831bc8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3050, "license_type": "no_license", "max_line_length": 73, "num_lines": 118, "path": "/proj3/Cards.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "from random import shuffle\n\nclass NotInStackException(Exception):\n pass\n\n#----------------------------------------\n# Get suits\n#----------------------------------------\ndef suits():\n return \"H D C S\".split(\" \")\n\n#----------------------------------------\n# Get ranks\n#----------------------------------------\ndef ranks():\n return \"A 2 3 4 5 6 7 8 9 10 J Q K\".split(\" \")\n\n#----------------------------------------\n# Card class for a single card\n#----------------------------------------\nclass Card(object):\n \"\"\"Represents a card object.\"\"\"\n def __init__(self, suit, rank):\n self.suit = suit\n self.rank = rank\n def __repr__(self):\n return str(self.suit) + str(self.rank)\n def __eq__(self, other):\n if isinstance(other, self.__class__):\n return self.__dict__ == other.__dict__\n else:\n return False\n\n#----------------------------------------\n# Stack class for hands\n#----------------------------------------\nclass 
CardStack(object):\n \"\"\"Represents any such stack of cards.\"\"\"\n def __init__(self):\n self.cards = []\n\n def __str__(self):\n return self.__repr__()\n\n def __repr__(self):\n s = \"\"\n for card in self.cards:\n s = s + str(card) + \", \"\n return s[0:-2]\n\n def __iadd__(self,other):\n self.cards.append(other)\n return self\n\n def __isub__(self,other):\n try:\n self.cards.remove(other)\n return self\n except ValueError:\n raise NotInStackException\n\n def size(self):\n return len(self.cards)\n\n def isEmpty(self):\n return len(self.cards) == 0\n\n def empty(self):\n self.cards = []\n\n#----------------------------------------\n# Deck for full deck of cards\n#----------------------------------------\nclass Deck(CardStack):\n \"\"\"Represents a deck of cards.\"\"\"\n\n def shuffle(self):\n shuffle(self.cards)\n\n def lastCard(self):\n return self.cards[len(self.cards) - 1]\n\n def remove(self, removedCard):\n self.cards.remove(removedCard)\n return removedCard\n \n def draw(self):\n return self.cards.pop()\n\n def addToDeck(self, c):\n self.cards.append(c)\n\n def fillDeck(self):\n if not self.cards:\n for suit in suits():\n for rank in ranks():\n card = Card(suit, rank)\n self.cards.append(card)\n\n def dealDeck(self, players):\n currPlayer = 0\n while not self.isEmpty:\n players[currPlayer].addToHand(self.draw())\n if currPlayer < (len(players) - 1):\n currPlayer += 1\n else:\n currPlayer = 0\n\n def dealCards(self, players, numCards):\n currPlayer = 0\n cardsDealt = 0\n while cardsDealt < (numCards*len(players)) and len(self.cards) > 0:\n players[currPlayer].addToHand(self.draw())\n if currPlayer < (len(players) - 1):\n currPlayer += 1\n else:\n currPlayer = 0\n cardsDealt += 1\n" }, { "alpha_fraction": 0.5619022846221924, "alphanum_fraction": 0.5707762837409973, "avg_line_length": 31.69577407836914, "blob_id": "cb9a5878ed762a6eab35e0cf69925f38daf0c907", "content_id": "5ace58cd275c38f9b4be2a1f732f863adc477cda", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11607, "license_type": "no_license", "max_line_length": 158, "num_lines": 355, "path": "/proj2/cheatServer.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\n#Boiler plate code taken from https://medium.com/swlh/lets-write-a-chat-app-in-python-f6783a9ac170\n\"\"\"Server for multithreaded (asynchronous) chat application.\"\"\"\nimport time\nfrom socket import AF_INET, socket, SOCK_STREAM, timeout\nfrom threading import Thread, Timer\nfrom stateMachineFramework import State, OuterMachine, InnerMachine, make\nfrom player import playedDeck, unplayedDeck, Player\nfrom cards import Card\n\n\n\ndef accept_incoming_connections():\n \"\"\"Sets up handling for incoming clients.\"\"\"\n global cheatGame\n cheatGame = None\n while cheatGame is None:\n try:\n client, client_address = SERVER.accept()\n except timeout:\n pass\n else:\n if cheatGame is None:\n print(\"%s:%s has connected.\" % client_address)\n client.send(bytes(\"Welcome to the Cheat server! Now type your name and press enter to join!\", \"utf8\"))\n addresses[client] = client_address\n Thread(target=handle_client, args=(client,)).start()\n cheatGame.run()\n\n\ndef handle_client(client): # Takes client socket as argument.\n \"\"\"Handles a single client connection.\"\"\"\n name = client.recv(BUFSIZ).decode(\"utf8\")\n while name in clients.values():\n client.send(bytes(\"Another user has that name. 
Try again.\", \"utf8\"))\n name = client.recv(BUFSIZ).decode(\"utf8\")\n welcome = 'Welcome %s! If you ever want to quit, type {quit} to exit. To see all users in the room, type {users}. To start the game, type {start}.' % name\n client.send(bytes(welcome, \"utf8\"))\n msg = \"%s has joined the chat!\" % name\n broadcast(bytes(msg, \"utf8\"))\n clients[client] = name\n\n while True:\n msg = client.recv(BUFSIZ).decode(\"utf8\")\n msglist = msg.split()\n if msglist[0] != \"{quit}\":\n if msglist[0] not in commands:\n broadcast(bytes(msg, \"utf8\"), name+\": \")\n else:\n commands[msglist[0]](client, msglist)\n else:\n client.send(bytes(\"{quit}\", \"utf8\"))\n client.close()\n del clients[client]\n broadcast(bytes(\"%s has left the chat.\" % name, \"utf8\"))\n break\n\n\ndef broadcast(msg, prefix=\"\"): # prefix is for name identification.\n \"\"\"Broadcasts a message to all the clients.\"\"\"\n\n for sock in clients:\n sock.send(bytes(prefix, \"utf8\")+msg)\n\n#----------------------------------------\n# Usable commands\n#----------------------------------------\n\ndef showHand(client, args):\n name = str(clients[client])\n msg = \"Hand: \" + players[name].getHand()\n client.send(bytes(msg, \"utf8\"))\n\ndef playSomeCards(client, args):\n global cheatGame\n global turnFlag\n if cheatGame is None:\n client.send(bytes(\"Wait for the game to start...\", \"utf8\"))\n elif clients[client] == currentPlayer:\n myHand = players[currentPlayer].hand.cards\n for card in args:\n myCard = Card(card[0], card[1:])\n if myCard in myHand:\n cardsPlayed.append(myCard)\n if len(cardsPlayed) > 0:\n players[currentPlayer].playCards(cardsPlayed)\n broadcast(bytes(currentPlayer + \" played \" + str(len(cardsPlayed)) + \" cards.\", \"utf8\"))\n turnFlag = True\n else:\n client.send(bytes(\"You must play at least one card.\", \"utf8\"))\n else:\n client.send(bytes(\"Wait for your turn...\", \"utf8\"))\n\ndef players(client, args):\n msg = \"Current players: \"\n \"\"\"Show names and addresses of all players\"\"\"\n for p in clients:\n msg = msg + str(clients[p]) + \", \"\n msg = msg[0:-2]\n client.send(bytes(msg, \"utf8\"))\n\ndef callCheat(client, args):\n global cheatFlag\n global cheating\n if cheatFlag:\n broadcast(bytes(\"%s has called cheat... \" % clients[client], \"utf8\"))\n if cheating:\n broadcast(bytes(\"%s was cheating, and has to pick up the stack! \" % currentPlayer, \"utf8\"))\n players[currentPlayer].takeAll()\n else:\n broadcast(bytes(\"%s wasn't cheating... %s has to pick up the stack...\" % (currentPlayer, clients[client]), \"utf8\"))\n players[clients[client]].takeAll()\n else:\n broadcast(bytes(\"Can't call cheat now.\", \"utf8\"))\n return False\n\n\ndef playCheat(client, args):\n global cheatGame\n if cheatGame is None and len(clients) >= 3:\n broadcast(bytes(\"%s has decided to start a game of cheat!\\n\" % clients[client], \"utf8\"))\n broadcast(bytes(\"There are %d players playing!\\n\" % len(clients), \"utf8\"))\n time.sleep(.5)\n broadcast(bytes(\"To play cards on your turn, write {play} followed by the cards.\\n\", \"utf8\"))\n broadcast(bytes(\"For example, write \\\"{play} H4 S4\\\" to play the 4 of Hearts and the 4 of Spades.\\n\", \"utf8\"))\n broadcast(bytes(\"If you think a player played cards that aren't of the current rank, announce {cheat}\\n\", \"utf8\"))\n broadcast(bytes(\"If they were lying, they have to pick up all the played cards... but if the weren't... you do!\\n\", \"utf8\"))\n broadcast(bytes(\"To see your hand, write {hand}. 
For help, write {help}.\\n\", \"utf8\"))\n time.sleep(.5)\n unplayedDeck.fillDeck()\n unplayedDeck.shuffle()\n for p in clients: #Init players\n players[clients[p]] = Player(clients[p])\n while not unplayedDeck.isEmpty():\n for p in players:\n if not unplayedDeck.isEmpty():\n players[p].draw()\n for p in clients:\n showHand(p, [])\n cheatGame = make(OuterMachine(\"Welcome to the game!\", len(clients)), cheatSpec)\n elif len(clients) < 3:\n client.send(bytes(\"Not enough players to start!\", \"utf8\"))\n else:\n client.send(bytes(\"A game of cheat is currently occurring!\", \"utf8\"))\n\ndef getHelp(client, args):\n client.send(bytes(\"To play cards on your turn, write {play} followed by the cards.\\n\", \"utf8\"))\n client.send(bytes(\"For example, write \\\"{play} H4 S4\\\" to play the 4 of Hearts and the 4 of Spades.\\n\", \"utf8\"))\n client.send(bytes(\"If you think a player played cards that aren't of the current rank, announce {cheat}\\n\", \"utf8\"))\n client.send(bytes(\"If they were lying, they have to pick up all the played cards... but if the weren't... you do!\\n\", \"utf8\"))\n client.send(bytes(\"To see your hand, write {hand}. For help, write {help}.\\n\", \"utf8\"))\n\n#----------------------------------------\n\ncommands = { \"{users}\" : players,\n \"{start}\" : playCheat,\n \"{play}\" : playSomeCards,\n \"{hand}\" : showHand,\n \"{cheat}\" : callCheat,\n \"{help}\" : getHelp }\nclients = {}\naddresses = {}\nplayers = {}\ncheatGame = None\ncheating = False\nturnFlag = False\ncheatFlag = False\nvictory = False\ncurrentPlayer = None\ncardsPlayed = []\n\n\nHOST = ''\nPORT = 33000\nBUFSIZ = 1024\nADDR = (HOST, PORT)\n\nSERVER = socket(AF_INET, SOCK_STREAM)\nSERVER.bind(ADDR)\n\n#----------------------------------------\n# Cheat game specifications\n#----------------------------------------\ndef cheatSpec(m, s, t):\n\n def timeToExit(self):\n return victory\n def repeatTurns(self):\n return not victory\n\n m.leave = timeToExit\n m.repeat = repeatTurns\n player = s(\"player*\")\n exit = s(\"exit.\")\n t(\"start\", m.true, player)\n t(player, m.repeat, player)\n t(player, m.leave, exit)\n\nclass Turn(State):\n global victory\n victory = False\n\n tag = \"*\"\n currPlayer = 0\n currSuit = 1\n\n def onEntry(self):\n global currentPlayer\n p = [clients[k] for k in clients]\n currentPlayer = p[self.currPlayer]\n name = str(currentPlayer + \" is up! 
\")\n broadcast(bytes(name, \"utf8\"))\n victory = make(InnerMachine(name,self.currPlayer,self.currSuit),cheatTurnSpec).run()\n\n def onExit(self):\n if self.currPlayer < self.model.numPlayers - 1:\n self.currPlayer += 1\n else:\n self.currPlayer = 0\n if self.currSuit < 13:\n self.currSuit += 1\n else:\n self.currSuit = 1\n\nclass GameOver(State):\n tag = \".\"\n\n def quit(self):\n return True\n\n def onExit(self):\n global players\n global cheatGame\n global cheating\n global turnFlag\n global cheatFlag\n global victory\n global currentPlayer\n global cardsPlayed\n players = {}\n cheatGame = None\n cheating = False\n turnFlag = False\n cheatFlag = False\n victory = False\n currentPlayer = None\n cardsPlayed = []\n broadcast(bytes(\"Game over!\\nSay {start} to play again!\", \"utf8\"))\n return True\n\n#----------------------------------------\n# Cheat turn specifications\n#----------------------------------------\ndef cheatTurnSpec(m, s, t):\n def waitForCards(self):\n global cardsPlayed\n global turnFlag\n if m.currRank == 1:\n m.currRank = \"A\"\n if m.currRank == 11:\n m.currRank = \"J\"\n if m.currRank == 12:\n m.currRank = \"Q\"\n if m.currRank == 13:\n m.currRank = \"K\"\n broadcast(bytes(\"Current rank is %s. \" % m.currRank, \"utf8\"))\n p = [k for k in clients]\n showHand(p[m.currPlayer], [])\n cardsPlayed = []\n turnFlag = False\n while not turnFlag:\n pass\n return True\n\n def waitForCheat(self):\n global cheatFlag\n global cheating\n global victory\n cheating = False\n for c in cardsPlayed:\n if str(c.rank) != str(m.currRank):\n print(c.rank, m.currRank)\n cheating = True\n print(cheating)\n for c in cardsPlayed:\n playedDeck.addToDeck(c)\n if not players[currentPlayer].hand.cards:\n broadcast(bytes(\"%s emptied their hand!\" % currentPlayer, \"utf8\"))\n time.sleep(1)\n if cheating:\n broadcast(bytes(\"%s was cheating with their final play, and must pick up the deck!\" % currentPlayer, \"utf8\"))\n players[currentPlayer].takeAll()\n else:\n broadcast(bytes(\"%s wasn't cheating... and wins!\" % currentPlayer, \"utf8\"))\n victory = True\n return False\n else:\n broadcast(bytes(\"You have five seconds to announce {cheat}.\", \"utf8\"))\n cheatFlag = True\n def timeout():\n cheatFlag = False\n t = Timer(5 , timeout)\n t.start()\n t.join()\n broadcast(bytes(\"Time's up!\", \"utf8\"))\n time.sleep(1)\n return True\n\n m.playcards = waitForCards\n m.cheat = waitForCheat\n play = s(\"play/\")\n check = s(\"check+\")\n nextTurn = s(\"exit=\")\n t(\"start\", m.true, play)\n t(play, m.playcards, check)\n t(check, m.cheat, nextTurn)\n\nclass PlayCards(State):\n tag = \"/\"\n\nclass Check(State):\n tag = \"+\"\n\n def onEntry(self):\n print(\"Entered into check state. Should determine cheating.\")\n\n def quit(self):\n return victory\n\n def onExit(self):\n return True\n\nclass NextTurn(State):\n tag = \"=\"\n\n def quit(self):\n return True\n\n def onEntry(self):\n print(\"Entered into next turn state. 
Should move to next turn.\")\n\n def onExit(self):\n return False\n\n#----------------------------------------\n\nif __name__ == \"__main__\":\n SERVER.settimeout(0.2)\n SERVER.listen(5)\n print(\"Waiting for connection...\")\n ACCEPT_THREAD = Thread(target=accept_incoming_connections)\n ACCEPT_THREAD.start()\n ACCEPT_THREAD.join()\n SERVER.close()\n" }, { "alpha_fraction": 0.5800529718399048, "alphanum_fraction": 0.5846425294876099, "avg_line_length": 34.40625, "blob_id": "a62279b97f67c43033aafcd1d43b82ccb9021563", "content_id": "82989c6eaf924e3f4155b6732a64cd6ee8a8cb6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5665, "license_type": "no_license", "max_line_length": 106, "num_lines": 160, "path": "/proj3/BigTwo.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "from Game import Game\nfrom time import time\nfrom Cards import Deck, Card, CardStack, NotInStackException, ranks\nfrom testFramework import testFramework\nfrom player import Player\n\n# Implementation of BigTwo as a Game\n\nclass BigTwo(Game):\n\n def __init__(self, players):\n super(BigTwo, self).__init__(players)\n self.playFlag = False\n self.cheatcaller = None\n self.currentNumCards = 0\n self.nextRank = 0\n self.currentPlayer = None\n self.currentRank = None\n self.endTrick = True\n self.passCount = 0\n # Discard pile used to pick up with cheat moves\n self.discard = Deck()\n # Buffer deck to hold cards while cheaters may cheat\n self.bufferDeck = CardStack()\n # Dictionary to define possible actions to take\n self.actions = { \"{players}\" : self.getPlayers,\n \"{start}\" : self.playBigTwo,\n \"{play}\" : self.playCards,\n \"{hand}\" : self.showHand,\n \"{pass}\" : self.callPass,\n \"{help}\" : self.getHelp }\n\n #---------------------------------------------------\n # Defining game actions\n #---------------------------------------------------\n def getHelp(self, player, msglist):\n player.tell(\"To play cards on your turn, write {play} followed by the cards. \")\n player.tell(\"For example, write \\\"{play} H4 S4\\\" to play the 4 of Hearts and the 4 of Spades. \")\n player.tell(\"If you cannot play any card, type {pass} \")\n player.tell(\"The first player to play decides the number of cards played that round.\")\n player.tell(\"The next player must play the same number of cards of a higher rank !\")\n player.tell(\"To see your hand, write {hand}. 
For help, write {help}.\")\n\n    def showHand(self, player, msglist):\n        player.tell(\"The cards in your hand:\")\n        player.tell(player.getHand())\n\n    def playBigTwo(self, player, msglist):\n        if len(self.players) < 3 or len(self.players) >= 5:\n            player.tell(\"Need three or four players to start the game...\")\n        else:\n            self.playing = True\n\n    def getPlayers(self, player, msglist):\n        player.tell(\"Current players: \")\n        msg = \"\"\n        for p in self.players:\n            msg += (p.name + \" --- hand size: \" + str(p.getHandSize()) + \"\\n\")\n        player.tell(msg[:-1])\n\n    def isValidPlay(self, playedCards):\n        # Every card in a play must share the same rank.\n        cardRank = playedCards[0].rank\n        for card in playedCards:\n            if card.rank != cardRank:\n                raise ValueError\n        if self.currentNumCards == 0:\n            # First play of the trick fixes the number of cards and the rank to beat.\n            self.currentNumCards = len(playedCards)\n            self.currentRank = cardRank\n        elif len(playedCards) == self.currentNumCards:\n            # NOTE: ranks are compared as strings here; a full Big Two ordering\n            # would map ranks to numeric values before comparing.\n            if cardRank <= self.currentRank:\n                raise ValueError\n            self.currentRank = cardRank\n            return\n        else:\n            raise ValueError\n\n    def playCards(self, player, msglist):\n        if not self.playing:\n            player.tell(\"Wait for the game to start...\")\n        elif player != self.currentPlayer:\n            player.tell(\"Wait for your turn...\")\n        elif len(msglist) == 1:\n            player.tell(\"You have to play a card.\")\n        else:\n            cards = msglist[1:]\n            playedCards = []\n            for card in cards:\n                card = Card(str(card[0]),str(card[1:]))\n                playedCards.append(card)\n            try:\n                self.isValidPlay(playedCards)\n                try:\n                    player.playFromHand(playedCards, self.bufferDeck)\n                    self.showGUIHand(self.currentPlayer)\n                    self.broadcast(player.name + \" has played \" + str(self.bufferDeck.size()) + \" card(s).\")\n                    self.broadcast(\"They currently hold \" + str(player.hand.size()) + \" cards.\")\n                    self.playFlag = True\n                except NotInStackException:\n                    player.tell(\"You can only play cards that are in your hand.\")\n            except ValueError:\n                player.tell(\"That is not a valid play for this trick.\")\n\n\n\n    def callPass(self, player, msglist):\n        if not self.playing:\n            player.tell(\"Wait for the game to start...\")\n        else:\n            if self.passCount == 3:\n                self.endTrick = True\n            else:\n                self.passCount += 1\n                self.playFlag = True\n\n    #---------------------------------------------------\n    # Defining game rules\n    #---------------------------------------------------\n\n    def pregameActions(self):\n        # Set to players\n        self.nextPlayer = iter(self.nextPlayerIterFunc())\n        # Make game announcements\n        self.broadcast(\"The Big Two Game is starting!\")\n        self.broadcast(\"There are %d players playing!\" % len(self.players))\n        self.wait(1)\n        for p in self.players:\n            self.getHelp(p, None)\n        self.wait(2)\n        self.deck.shuffle()\n        while not self.deck.isEmpty():\n            self.currentPlayer = next(self.nextPlayer)\n            self.currentPlayer.addToHand(self.deck.draw())\n        for p in self.players:\n            self.showGUIHand(p)\n        return True\n\n    def preplayGuards(self):\n        self.broadcast(\"It is %s\\'s turn!\" % self.currentPlayer.name)\n        self.wait(.25)\n        self.broadcast(\"The rank this turn is \" + str(self.currentRank) + \".\")\n        self.wait(1)\n        self.showHand(self.currentPlayer, None)\n        if self.endTrick == True:\n            self.currentNumCards = 0\n            self.currentRank = 0\n            self.endTrick = False\n        return True\n\n    def doPlay(self):\n        while not self.playFlag:\n            pass\n        self.playFlag = False\n        return True\n\n    def checkForVictory(self):\n        return self.currentPlayer.hand.isEmpty()\n\n    def endGame(self):\n        self.wait(1)\n        self.broadcast(self.currentPlayer.name + \" has emptied their hand, and wins!\")\n        self.broadcast(\"Thanks for playing!\")\n" }, { "alpha_fraction": 0.7489845752716064, "alphanum_fraction": 
0.7558895349502563, "avg_line_length": 33.676055908203125, "blob_id": "fac0e069b53e8da7f9b6dc23d674370bc626b4d8", "content_id": "22370bc99855e9f400d37b5550b1a1f65f74d5af", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2462, "license_type": "no_license", "max_line_length": 393, "num_lines": 71, "path": "/README.md", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "# PLM18-Group-E\nGroup E for Programming Languages and Modeling\n\n*Note: These instructions are for the version under the proj3 folder.*\n\nDickerson, Thomas\n\nEgorova, Anastasia\n\nCassandra, Keesee\n\n# Cheat\n\nCheat (also known as B.S., bluff, and I-doubt-it) is a card game where the players aim to get rid of all of their cards. It is a game of deception, with cards being played face-down and players being permitted to lie about the cards they have played. A challenge is usually made by players calling out the name of the game, and the loser of a challenge has to pick up every card played so far.\n\n## Getting Started\n\nUse those instructions if you want to run cheat on your local machine. Note that the game will not start unless a minimum of three players are connected to the server.\n\n### Prerequisites\n\nHave Python installed as python3\n\n### Running\n\nOn the \"host\" computer:\n\nOpen a command shell. On Windows type ipconfig, on Mac type ifconfig. Find your LAN ip address, and write it down for the players of the game. While you can play an instance of the game on the host computer, it is not recommended you do so, as output on the server may give you an unfair advantage.\n```\npython3 .\\server.py\nName of the game? Cheat\n```\n\nOn other consoles:\n\n```\npython3 .\\client.py\nEnter host: [Enter the LAN ip address of the host computer]\n```\n# Bartok\n\nBartok is a card game where the players aim to get rid of all of their cards. To play every player has to either put a card matching the most recent played card by rank/suit or draw another card and skip a turn. If a 2 is played, the next player has to draw two cards and skip a turn. \n\n## Getting Started\n\nUse those instructions if you want to run cheat on your local machine. Note that the game will not start unless a minimum of four and a maximum of 4 players are connected to the server.\n\n### Prerequisites\n\nHave Python installed as python3\n\n### Running\n\nOn the \"host\" computer:\n\nOpen a command shell. On Windows type ipconfig, on Mac type ifconfig. Find your LAN ip address, and write it down for the players of the game. While you can play an instance of the game on the host computer, it is not recommended you do so, as output on the server may give you an unfair advantage.\n```\npython3 .\\server.py\nName of the game? 
Bartok\n```\n\nOn other consoles:\n\n```\npython3 .\\client.py\nEnter host: [Enter the LAN ip address of the host computer]\n```\n\n\n#### March 18 Branch\nhttps://github.com/Kyekifino/PLM18-Group-E/tree/march18\n" }, { "alpha_fraction": 0.6186887621879578, "alphanum_fraction": 0.6209495067596436, "avg_line_length": 33.02564239501953, "blob_id": "de78c1a9cb9748238536eb880893127e84ec9bdf", "content_id": "d3dc5513bbe3c30355542918e9753f53a5db34db", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1327, "license_type": "no_license", "max_line_length": 105, "num_lines": 39, "path": "/proj2/player.py", "repo_name": "Kyekifino/PLM18-Group-E", "src_encoding": "UTF-8", "text": "from Cards import CardStack, Deck\n\nunplayedDeck = Deck()\nplayedDeck = Deck()\nplayedDeck.changeVisibility()\n\n# stuff outside this class should be in machine, just used it for testing\nclass Player(object):\n \"\"\"Represents a player object\"\"\"\n def __init__(self, name):\n super().__init__()\n self.name = name\n self.hand = CardStack()\n self.handSize = 0\n\n def getHand(self):\n return str(self.hand.cards)\n\n def draw(self, num = 1):\n if unplayedDeck.isEmpty():\n saveCard = playedDeck.draw()\n while not playedDeck.isEmpty():\n unplayedDeck.addToDeck(playedDeck.draw())\n unplayedDeck.shuffle()\n playedDeck.addToDeck(saveCard)\n for i in range(num):\n self.hand.cards.append(unplayedDeck.draw())\n self.handSize += 1\n\n def takeAll(self):\n self.handSize += len(playedDeck.cards)\n self.hand.cards += playedDeck.cards\n playedDeck.cards[:] = []\n\n def playCards(self, cards):\n # cards is an array because there is a possiblity of multiple cards on one play\n # they are sent to buffer because some games have a possiblity of not accepting the cards (Cheat)\n self.handSize -= len(cards)\n self.hand.cards = [x for x in self.hand.cards if x not in cards]\n" } ]
17
bivanalhar/instagram_bot
https://github.com/bivanalhar/instagram_bot
f58dcd3cd03496370d044cfe533a41c49d6b0223
95746b512c9a8b4b33f41283f743a6ca86a9d992
994e342af942712c73a15c016b984103eb491bf4
refs/heads/main
2022-12-30T13:21:30.859946
2020-10-15T03:37:28
2020-10-15T03:44:21
303,090,197
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.6428059935569763, "alphanum_fraction": 0.6556907892227173, "avg_line_length": 25.358489990234375, "blob_id": "63f76a9582b029b9985a7b04301bbfd1495ac65c", "content_id": "e7bfee5e2aebc84bd25dd1eac5fa7468fb0e6e98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1397, "license_type": "no_license", "max_line_length": 118, "num_lines": 53, "path": "/bot_instapy.py", "repo_name": "bivanalhar/instagram_bot", "src_encoding": "UTF-8", "text": "from instapy import InstaPy\nimport time\nimport sys\n\nif len(sys.argv) != 4:\n print(\"need to include file of targeted users, file of usernames we would like to handle and number of followers\")\n sys.exit(0)\n\ntry:\n followCount = int(sys.argv[3])\nexcept ValueError:\n print(\"the last input is the number of followers, and thus should be a valid integer\")\n sys.exit(0)\n\nuserFile = open(sys.argv[1], \"r\", encoding='utf-8')\ntargetFile = open(sys.argv[2], \"r\", encoding='utf-8')\n\nuserPass = {}\nfor line in userFile.readlines():\n tabs = line.strip().split()\n if len(tabs) < 2 :\n continue\n\n userPass[tabs[0]] = tabs[1]\n\nif len(userPass) == 0:\n print(\"list of username and password are empty. Please check again\")\n sys.exit(0)\n\nusername = []\nfor line in targetFile.readlines():\n users = line.strip().split()\n if len(users) > 1:\n continue\n\n username.append(users[0])\n\nif len(username) == 0:\n print(\"list of targeted users are empty. Please check again\")\n sys.exit(0)\n\nsessions = []\n\nfor user in userPass:\n sess = InstaPy(username = user, password = userPass[user])\n sess.login()\n sessions.append(sess)\n\nwhile True:\n for sess in sessions:\n start = time.time()\n sess.follow_user_followers(username, amount = followCount, sleep_delay = 1, randomize = False)\n print(\"Elapsed Time : {} seconds\".format(time.time() - start))\n" }, { "alpha_fraction": 0.754611074924469, "alphanum_fraction": 0.7634322643280029, "avg_line_length": 46.96154022216797, "blob_id": "8d313d832b79b975a878dd467769be21aa7ef997", "content_id": "5cd0f80df991ec1b6dbc639160d8d8f8f9d6f2dd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2494, "license_type": "no_license", "max_line_length": 387, "num_lines": 52, "path": "/README.md", "repo_name": "bivanalhar/instagram_bot", "src_encoding": "UTF-8", "text": "# Building Instagram Bot for Auto-Following\n\nTo clone this git, simply run the following command on your terminal\n```\ngit clone https://github.com/bivanalhar/instagram_bot.git\n```\n\nThis file will contain a guideline on how to operate the code successfully. First of all, note that you need to install all the necessary libraries enlisted inside the file named ```requirements.txt```. The method on doing that is very simple, which is to run the following command line into your terminal\n\n```\npip3 install -r requirements.txt\n```\n\nAfterwards, the code implicitly uses a library named ```Selenium```, which requires Firefox as a browser in which the code will operate. Make sure to have Firefox installed in your computer (I forgot precisely the minimum version required for this Firefox, therefore just try to install the newest version of it. Or, you may just simply update your existing Firefox into the newest one).\n\nBefore running the code, you need to make sure that the file named ```username.txt``` exists within the same folder as ```bot_instapy.py```. 
The format of each line inside ```username.txt``` should be like the following (each username and its respective password is separated by one space)\n\n```\n<username1> <password1>\n<username2> <password2>\n...\n```\n\nAfter that, we may simply run the following code\n\n```\npython3 bot_instapy.py <filename of usernames> <filename of target users> <number of followers>\n```\n\nBy default, the filename for usernames is ```username.txt``` and the filename for target users is ```target.txt```\n\nHere, note that those 2 fields **need to be filled in**, or otherwise you will encounter an error that will result in the early termination of this program. Several things to be noted on are the followings:\n1. There should be **at least** 1 username that is to be followed\n2. The number of accounts to follow **should be an integer**\n\nFor now, the default of the valid command line should be the following:\n```\npython3 bot_instapy.py username.txt target.txt 100\n```\n\nSeveral mistakes that may occur are the following:\n1. In the following case, user did not input the usernames to be targeted on\n```\npython3 bot_instapy.py 20\nneed to include file of targeted users, file of usernames we would like to handle and number of followers\n```\n\n2. In the following case, the number of accounts to follow is not a valid integer\n```\npython3 bot_instapy.py qabdjwqbdioq uwjwq782nb\nthe last input is the number of followers, and thus should be a valid integer\n```\n" } ]
2
moguangquan1/docker1
https://github.com/moguangquan1/docker1
591eb4653825e1ead26f156f674f5bed236755ab
753c5498faaf53e741650d73fe53c0fc76f254ca
858ede93dfb5de8d58beb1bf4e0da18488de626e
refs/heads/master
2021-07-03T14:02:53.731938
2017-09-25T09:48:04
2017-09-25T09:48:04
104,709,685
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6875, "alphanum_fraction": 0.75, "avg_line_length": 7, "blob_id": "21c06f8923a68eb07aff7fadb82ab73faf665399", "content_id": "ee456e011c1edd07f35d6dbbb266d717334b687a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 16, "license_type": "permissive", "max_line_length": 9, "num_lines": 2, "path": "/README.md", "repo_name": "moguangquan1/docker1", "src_encoding": "UTF-8", "text": "# docker1\nxuelu\n" }, { "alpha_fraction": 0.5107274055480957, "alphanum_fraction": 0.5513291954994202, "avg_line_length": 32.23579406738281, "blob_id": "0c30189ac3c6c26155d13825ce561b1eace73a4f", "content_id": "cdf4134b028d699d1673bbf7b4a2067992fe0783", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11699, "license_type": "permissive", "max_line_length": 99, "num_lines": 352, "path": "/src/test_inject.py", "repo_name": "moguangquan1/docker1", "src_encoding": "UTF-8", "text": "from random import random\nfrom unittest import TestCase\n\nimport inject\nfrom inject import Binder, InjectorException, Injector\n\n\nclass TestBinder(TestCase):\n def test_bind(self):\n binder = Binder()\n binder.bind(int, 123)\n\n assert int in binder._bindings\n\n def test_bind__class_required(self):\n binder = Binder()\n\n self.assertRaisesRegexp(InjectorException, 'Binding key cannot be None',\n binder.bind, None, None)\n\n def test_bind__duplicate_binding(self):\n binder = Binder()\n binder.bind(int, 123)\n\n self.assertRaisesRegexp(InjectorException, \"Duplicate binding\", \n binder.bind, int, 456)\n\n def test_bind_provider(self):\n provider = lambda: 123\n binder = Binder()\n binder.bind_to_provider(int, provider)\n\n assert binder._bindings[int] is provider\n\n def test_bind_provider__provider_required(self):\n binder = Binder()\n self.assertRaisesRegexp(InjectorException, \"Provider cannot be None\",\n binder.bind_to_provider, int, None)\n\n def test_bind_constructor(self):\n constructor = lambda: 123\n binder = Binder()\n binder.bind_to_constructor(int, constructor)\n\n assert binder._bindings[int]._constructor is constructor\n\n def test_bind_constructor__constructor_required(self):\n binder = Binder()\n self.assertRaisesRegexp(InjectorException, \"Constructor cannot be None\",\n binder.bind_to_constructor, int, None)\n\n\nclass TestInjector(TestCase):\n def test_instance_binding__should_use_the_same_instance(self):\n injector = Injector(lambda binder: binder.bind(int, 123))\n instance = injector.get_instance(int)\n assert instance == 123\n\n def test_constructor_binding__should_construct_singleton(self):\n injector = Injector(lambda binder: binder.bind_to_constructor(int, random))\n instance0 = injector.get_instance(int)\n instance1 = injector.get_instance(int)\n\n assert instance0 == instance1\n\n def test_provider_binding__should_call_provider_for_each_injection(self):\n injector = Injector(lambda binder: binder.bind_to_provider(int, random))\n instance0 = injector.get_instance(int)\n instance1 = injector.get_instance(int)\n assert instance0 != instance1\n\n\n def test_runtime_binding__should_create_runtime_singleton(self):\n class MyClass(object):\n pass\n\n injector = Injector()\n instance0 = injector.get_instance(MyClass)\n instance1 = injector.get_instance(MyClass)\n\n assert instance0 is instance1\n assert isinstance(instance0, MyClass)\n\n def test_runtime_binding__not_callable(self):\n injector = Injector()\n self.assertRaisesRegexp(InjectorException, 
\n 'Cannot create a runtime binding, the key is not callable, key=123',\n injector.get_instance, 123)\n\n\nclass TestInject(TestCase):\n def tearDown(self):\n inject.clear()\n\n def test_configure__should_create_injector(self):\n injector0 = inject.configure()\n injector1 = inject.get_injector()\n assert injector0\n assert injector0 is injector1\n\n def test_configure__should_add_bindings(self):\n injector = inject.configure(lambda binder: binder.bind(int, 123))\n instance = injector.get_instance(int)\n assert instance == 123\n\n def test_configure__already_configured(self):\n inject.configure()\n\n self.assertRaisesRegexp(InjectorException, 'Injector is already configured',\n inject.configure)\n \n def test_configure_once__should_create_injector(self):\n injector = inject.configure_once()\n assert inject.get_injector() is injector\n \n def test_configure_once__should_return_existing_injector_when_present(self):\n injector0 = inject.configure()\n injector1 = inject.configure_once()\n assert injector0 is injector1\n \n def test_is_configured__should_return_true_when_injector_present(self):\n assert inject.is_configured() is False\n \n inject.configure()\n assert inject.is_configured() is True\n \n inject.clear()\n assert inject.is_configured() is False\n\n def test_clear_and_configure(self):\n injector0 = inject.configure()\n injector1 = inject.clear_and_configure() # No exception.\n assert injector0\n assert injector1\n assert injector1 is not injector0\n\n def test_get_injector_or_die(self):\n self.assertRaisesRegexp(InjectorException, 'No injector is configured',\n inject.get_injector_or_die)\n\n def test_instance(self):\n inject.configure(lambda binder: binder.bind(int, 123))\n instance = inject.instance(int)\n assert instance == 123\n\n def test_attr(self):\n class MyClass(object):\n field = inject.attr(int)\n\n inject.configure(lambda binder: binder.bind(int, 123))\n my = MyClass()\n value0 = my.field\n value1 = my.field\n\n assert value0 == 123\n assert value1 == 123\n\n def test_class_attr(self):\n class MyClass(object):\n field = inject.attr(int)\n\n inject.configure(lambda binder: binder.bind(int, 123))\n value0 = MyClass.field\n value1 = MyClass.field\n\n assert value0 == 123\n assert value1 == 123\n\n def test_param_by_name(self):\n @inject.param('val')\n def test_func(val=None):\n return val\n\n inject.configure(lambda binder: binder.bind('val', 123))\n\n assert test_func() == 123\n assert test_func(val=321) == 321\n\n def test_param_by_class(self):\n @inject.param('val', int)\n def test_func(val):\n return val\n\n inject.configure(lambda binder: binder.bind(int, 123))\n\n assert test_func() == 123\n\n def test_params(self):\n @inject.params(val=int)\n def test_func(val):\n return val\n\n inject.configure(lambda binder: binder.bind(int, 123))\n\n assert test_func() == 123\n assert test_func(321) == 321\n assert test_func(val=42) == 42\n\n def test_params_multi(self):\n @inject.params(a='A', b='B', c='C')\n def test_func(a, b, c):\n return a, b, c\n\n def config(binder):\n binder.bind('A', 1)\n binder.bind('B', 2)\n binder.bind('C', 3)\n \n inject.configure(config)\n\n assert test_func() == (1, 2, 3)\n assert test_func(10) == (10, 2, 3)\n assert test_func(10, 20) == (10, 20, 3)\n assert test_func(10, 20, 30) == (10, 20, 30)\n assert test_func(a='a') == ('a', 2, 3)\n assert test_func(b='b') == (1, 'b', 3)\n assert test_func(c='c') == (1, 2, 'c')\n assert test_func(a=10, c=30) == (10, 2, 30)\n assert test_func(c=30, b=20, a=10) == (10, 20, 30)\n assert test_func(10, b=20) == (10, 
20, 3)\n\n def test_params_with_defaults(self):\n # note the inject overrides default parameters\n @inject.params(b='B', c='C')\n def test_func(a=1, b=None, c=300):\n return a, b, c\n\n def config(binder):\n binder.bind('B', 2)\n binder.bind('C', 3)\n \n inject.configure(config)\n\n assert test_func() == (1, 2, 3)\n assert test_func(10) == (10, 2, 3)\n assert test_func(10, 20) == (10, 20, 3)\n assert test_func(10, 20, 30) == (10, 20, 30)\n assert test_func(a='a') == ('a', 2, 3)\n assert test_func(b='b') == (1, 'b', 3)\n assert test_func(c='c') == (1, 2, 'c')\n assert test_func(a=10, c=30) == (10, 2, 30)\n assert test_func(c=30, b=20, a=10) == (10, 20, 30)\n assert test_func(10, b=20) == (10, 20, 3)\n \n def test_params_on_method(self):\n class Test:\n @inject.params(b='B', c='C')\n def func(self, a=1, b=None, c=None):\n return self, a, b, c\n\n def config(binder):\n binder.bind('B', 2)\n binder.bind('C', 3)\n \n inject.configure(config)\n test = Test()\n \n assert test.func() == (test, 1, 2, 3)\n assert test.func(10) == (test, 10, 2, 3)\n assert test.func(10, 20) == (test, 10, 20, 3)\n assert test.func(10, 20, 30) == (test, 10, 20, 30)\n assert test.func(a='a') == (test, 'a', 2, 3)\n assert test.func(b='b') == (test, 1, 'b', 3)\n assert test.func(c='c') == (test, 1, 2, 'c')\n assert test.func(a=10, c=30) == (test, 10, 2, 30)\n assert test.func(c=30, b=20, a=10) == (test, 10, 20, 30)\n assert test.func(10, b=20) == (test, 10, 20, 3)\n\n def test_params_on_classmethod(self):\n class Test:\n # note inject must be *before* classmethod!\n @classmethod\n @inject.params(b='B', c='C')\n def func(cls, a=1, b=None, c=None):\n return cls, a, b, c\n\n def config(binder):\n binder.bind('B', 2)\n binder.bind('C', 3)\n \n inject.configure(config)\n \n assert Test.func() == (Test, 1, 2, 3)\n assert Test.func(10) == (Test, 10, 2, 3)\n assert Test.func(10, 20) == (Test, 10, 20, 3)\n assert Test.func(10, 20, 30) == (Test, 10, 20, 30)\n assert Test.func(a='a') == (Test, 'a', 2, 3)\n assert Test.func(b='b') == (Test, 1, 'b', 3)\n assert Test.func(c='c') == (Test, 1, 2, 'c')\n assert Test.func(a=10, c=30) == (Test, 10, 2, 30)\n assert Test.func(c=30, b=20, a=10) == (Test, 10, 20, 30)\n assert Test.func(10, b=20) == (Test, 10, 20, 3)\n\n def test_params_on_classmethod_ob_object(self):\n class Test:\n # note inject must be *before* classmethod!\n @classmethod\n @inject.params(b='B', c='C')\n def func(cls, a=1, b=None, c=None):\n return cls, a, b, c\n\n def config(binder):\n binder.bind('B', 2)\n binder.bind('C', 3)\n \n inject.configure(config)\n test = Test\n \n assert test.func() == (Test, 1, 2, 3)\n assert test.func(10) == (Test, 10, 2, 3)\n assert test.func(10, 20) == (Test, 10, 20, 3)\n assert test.func(10, 20, 30) == (Test, 10, 20, 30)\n assert test.func(a='a') == (Test, 'a', 2, 3)\n assert test.func(b='b') == (Test, 1, 'b', 3)\n assert test.func(c='c') == (Test, 1, 2, 'c')\n assert test.func(a=10, c=30) == (Test, 10, 2, 30)\n assert test.func(c=30, b=20, a=10) == (Test, 10, 20, 30)\n assert test.func(10, b=20) == (Test, 10, 20, 3)\n\n \nclass TestFunctional(TestCase):\n def tearDown(self):\n inject.clear()\n \n def test(self):\n class Config(object):\n def __init__(self, greeting):\n self.greeting = greeting\n \n class Cache(object):\n config = inject.attr(Config)\n \n def load_greeting(self):\n return self.config.greeting\n \n class User(object):\n cache = inject.attr(Cache)\n \n def __init__(self, name):\n self.name = name\n \n def greet(self):\n return '%s, %s' % (self.cache.load_greeting(), 
self.name)\n \n def config(binder):\n binder.bind(Config, Config('Hello'))\n \n inject.configure(config)\n \n user = User('John Doe')\n greeting = user.greet()\n assert greeting == 'Hello, John Doe'\n" } ]
2
ch-kr/gnomad_qc
https://github.com/ch-kr/gnomad_qc
4a41a5a9cd94e9453815b42c5ebbf128e42ad8db
25a81bc2166fbe4ccbb2f7a87d36aba661150413
f10768def6300413ad971c1aec7062554a9d92c8
refs/heads/master
2020-04-26T19:45:33.982964
2019-02-13T02:02:53
2019-02-13T02:02:53
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5564004778862, "alphanum_fraction": 0.5717363953590393, "avg_line_length": 51.59333419799805, "blob_id": "df0af6381ece81ac28583e37e022d9e18098fc4d", "content_id": "9231164e16d2beee5ef26530b16eeb0e68601a96", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7890, "license_type": "no_license", "max_line_length": 156, "num_lines": 150, "path": "/load_data/load_coverage.py", "repo_name": "ch-kr/gnomad_qc", "src_encoding": "UTF-8", "text": "\nfrom gnomad_hail import *\n\n\ndef main(args):\n data_type = 'exomes' if args.exomes else 'genomes'\n num_partitions = 1000 # if args.exomes else 10000\n hl.init(min_block_size=0, log='/load_coverage.log')\n\n source_root = f'gs://gnomad/coverage/source/{data_type}'\n root = f'gs://gnomad/coverage/hail-0.2/{data_type}'\n\n all_file_data = []\n with hl.hadoop_open(f'{source_root}/coverage_files.txt', 'r') as f:\n for file_data in f:\n fname, anchored, sample_data = file_data.strip().split('\\t', 2)\n base = fname.split('.txt')[0]\n anchored = anchored == 'anchored'\n sample_data = sample_data.split('\\t')\n all_file_data.append([base, anchored, sample_data])\n\n sample_count = 9733 if args.exomes else 1279\n assert sum([len(x[2]) for x in all_file_data]) == sample_count\n\n meta_kt = get_gnomad_meta(data_type, full_meta=True)\n bam_dict = dict(get_sample_data(meta_kt, [meta_kt.bam, meta_kt.s]))\n\n assert all([all([y in bam_dict for y in x[2]]) for x in all_file_data])\n\n # Modify all_file_data in place\n for file_data in all_file_data:\n for i in range(len(file_data[2])):\n file_data[2][i] = bam_dict[file_data[2][i]]\n\n if args.read_coverage_files:\n # 20-30 seconds each for exomes, 2m45s for genomes\n for base, anchored, sample_data in all_file_data:\n fname = f'{source_root}/parts/full_{base}.gz'\n print('Loading:', fname)\n if anchored:\n mt = hl.import_matrix_table(fname, no_header=True, row_fields={'f0': hl.tstr, 'f1': hl.tint}, min_partitions=num_partitions, force_bgz=True)\n mt = mt.transmute_rows(locus=hl.locus(mt.f0, mt.f1))\n else:\n mt = hl.import_matrix_table(fname, no_header=True, min_partitions=num_partitions, force_bgz=True)\n mt = mt.key_cols_by(s=hl.literal(sample_data)[mt.col_id]).drop('col_id')\n mt.transmute_entries(coverage=mt.x).write(f'{root}/parts/part_{base}.mt', args.overwrite)\n\n if args.merge_coverage_mts:\n # Exomes: first merges are ~7.5 mins each, final one is ~1.5 hours (on 40 n1-standard-8s)\n chunks = int(len(all_file_data) ** 0.5) + 1\n for i in range(chunks):\n if i * chunks >= len(all_file_data): break\n base, anchored, sample_data = all_file_data[i * chunks]\n mt = hl.read_matrix_table(f'{root}/parts/part_{base}.mt')\n if i:\n mt = mt.select_rows()\n else:\n assert anchored\n for j in range(1, chunks):\n if i * chunks + j >= len(all_file_data): break\n base, anchored, sample_data = all_file_data[i * chunks + j]\n next_mt = hl.read_matrix_table(f'{root}/parts/part_{base}.mt').select_rows()\n mt = mt.union_cols(next_mt)\n mt.write(f'{root}/intermediates/intermediate_{i}.mt', args.overwrite)\n\n mt = hl.read_matrix_table(f'{root}/intermediates/intermediate_0.mt')\n for i in range(1, chunks):\n try:\n next_mt = hl.read_matrix_table(f'{root}/intermediates/intermediate_{i}.mt')\n mt = mt.union_cols(next_mt)\n except Exception:\n pass\n # This part has some trouble for genomes\n if args.exomes:\n mt.write(f'{root}/intermediates/final_intermediate.mt', args.overwrite)\n mt = hl.read_matrix_table(f'{root}/intermediates/final_intermediate.mt')\n 
mt = mt.key_rows_by('locus')\n mt.write(coverage_mt_path(data_type), args.overwrite)\n\n if args.aggregate_coverage:\n mt = hl.read_matrix_table(coverage_mt_path(data_type))\n meta_ht = get_gnomad_meta(data_type)\n mt = mt.filter_cols(meta_ht[mt.s].release)\n mt = mt.annotate_rows(mean=hl.agg.mean(mt.coverage),\n median=hl.median(hl.agg.collect(mt.coverage)),\n over_1=hl.agg.fraction(mt.coverage >= 1),\n over_5=hl.agg.fraction(mt.coverage >= 5),\n over_10=hl.agg.fraction(mt.coverage >= 10),\n over_15=hl.agg.fraction(mt.coverage >= 15),\n over_20=hl.agg.fraction(mt.coverage >= 20),\n over_25=hl.agg.fraction(mt.coverage >= 25),\n over_30=hl.agg.fraction(mt.coverage >= 30),\n over_50=hl.agg.fraction(mt.coverage >= 50),\n over_100=hl.agg.fraction(mt.coverage >= 100))\n ht = mt.rows()\n if args.exomes:\n ht.write(coverage_ht_path(data_type), args.overwrite)\n else:\n ht.write(f'{root}/intermediates/final_aggregated.ht', args.overwrite)\n ht = hl.read_table(f'{root}/intermediates/final_aggregated.ht')\n ht.key_by('locus').write(coverage_ht_path(data_type), args.overwrite)\n\n if args.aggregate_coverage_pops:\n mt = hl.read_matrix_table(coverage_mt_path(data_type))\n meta_ht = get_gnomad_meta(data_type)\n mt = mt.annotate_cols(meta=meta_ht[mt.s])\n mt = mt.filter_cols(mt.meta.release)\n agg_ds = (mt\n .group_cols_by(mt.meta.pop, mt.meta.qc_platform)\n .aggregate(mean=hl.agg.mean(mt.coverage),\n median=hl.median(hl.agg.collect(mt.coverage)),\n over_1=hl.agg.fraction(mt.coverage >= 1),\n over_5=hl.agg.fraction(mt.coverage >= 5),\n over_10=hl.agg.fraction(mt.coverage >= 10),\n over_15=hl.agg.fraction(mt.coverage >= 15),\n over_20=hl.agg.fraction(mt.coverage >= 20),\n over_25=hl.agg.fraction(mt.coverage >= 25),\n over_30=hl.agg.fraction(mt.coverage >= 30),\n over_50=hl.agg.fraction(mt.coverage >= 50),\n over_100=hl.agg.fraction(mt.coverage >= 100)))\n agg_ds.write(coverage_mt_path(data_type, by_population=True, by_platform=True), args.overwrite)\n\n if args.export_coverage:\n ht = hl.read_table(coverage_ht_path(data_type)).key_by()\n ht = ht.transmute(chrom=ht.locus.contig, pos=ht.locus.position).select(\n 'chrom', 'pos', *list(ht.drop('locus', 'row_id').row))\n ht.export(coverage_ht_path(data_type).replace('.ht', '.tsv.bgz'))\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--exomes', help='Run on exomes. One and only one of --exomes or --genomes is required.',\n action='store_true')\n parser.add_argument('--genomes', help='Run on genomes. One and only one of --exomes or --genomes is required.',\n action='store_true')\n parser.add_argument('--read_coverage_files', help='Read raw coverage .gz files', action='store_true')\n parser.add_argument('--merge_coverage_mts', help='Merge individual coverage .mt files', action='store_true')\n parser.add_argument('--aggregate_coverage', help='Aggregate coverage data', action='store_true')\n parser.add_argument('--aggregate_coverage_pops', help='Aggregate coverage data', action='store_true')\n parser.add_argument('--export_coverage', help='Export coverage data', action='store_true')\n parser.add_argument('--slack_channel', help='Slack channel to post results and notifications to.')\n parser.add_argument('--overwrite', help='Overwrite data', action='store_true')\n args = parser.parse_args()\n\n if int(args.exomes) + int(args.genomes) != 1:\n sys.exit('Error: One and only one of --exomes or --genomes must be specified.')\n\n if args.slack_channel:\n try_slack(args.slack_channel, main, args)\n else:\n main(args)\n" } ]
1
jkmiao/label_code
https://github.com/jkmiao/label_code
b2075ca45d61207e89640f548fdf30ddfd7addb3
ae98530febe07932d21445653fcd6b0c4d3e50e6
c98e1a7e399fdba892290de6b00007e11a664fbc
refs/heads/master
2021-01-21T21:06:17.522780
2017-06-19T13:09:08
2017-06-19T13:09:08
94,777,727
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.6130374670028687, "alphanum_fraction": 0.6234396696090698, "avg_line_length": 27.799999237060547, "blob_id": "d61c2fc10057fac520ac73c0843dbbb54fbd3826", "content_id": "81d9ee6b43fa2d6a44a79731c769c0e7e7ae4373", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1478, "license_type": "no_license", "max_line_length": 76, "num_lines": 50, "path": "/label_code/web_server.py", "repo_name": "jkmiao/label_code", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# encoding: utf-8\n\nimport tornado.httpserver\nimport tornado.options\nimport tornado.web\nfrom tornado.options import define,options\nimport os\nfrom apicode import Apicode\n\ndefine(\"port\", default=8088, help=\"run on the given port\", type=int)\n\nclass VcodeLabelHandler(tornado.web.RequestHandler):\n def get(self):\n\n labelMsg = vcode.get_labelmsg()\n\n self.render('label_code.html', **labelMsg)\n\n def post(self):\n\n result = self.get_argument('result', '')\n fname = self.get_argument('fname', '')\n\n # 验证输入\n if fname and len(result)>1:\n print 'fname',fname\n labelMsg = vcode.get_labelmsg(fname, result)\n self.render('label_code.html', **labelMsg)\n else:\n print '更新失败', result, fname\n self.write('<h1>error: 验证码输入必须大于1位!</h1>')\n\n\nif __name__ == \"__main__\":\n\n vcode = Apicode() \n\n tornado.options.parse_command_line()\n app = tornado.web.Application(\n handlers = [(r'/', VcodeLabelHandler),],\n template_path = os.path.join(os.path.dirname(__file__),\"templates\"),\n static_path = os.path.join(os.path.dirname(__file__), \"static\"),\n debug=True,\n autoescape=None,\n )\n http_server = tornado.httpserver.HTTPServer(app)\n http_server.listen(options.port)\n print \"starting tornado at port http://127.0.0.1:%d\" % options.port\n tornado.ioloop.IOLoop.instance().start()\n\n\n" }, { "alpha_fraction": 0.5777202248573303, "alphanum_fraction": 0.6632124185562134, "avg_line_length": 17.33333396911621, "blob_id": "ecb0decf97e5226cf918e4928e96321bedfab725", "content_id": "6fa03f9ec56d8c85ecbb3858db28e09a8e27d1ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 554, "license_type": "no_license", "max_line_length": 58, "num_lines": 21, "path": "/README.md", "repo_name": "jkmiao/label_code", "src_encoding": "UTF-8", "text": "\n## 验证码在线标注打码平台\n<code>原理:</code> 利用用户的输入标签, 将static/uploads 文件夹下的验证码图片重命名.\n\n## 使用\n1. 将需要打标的验证码图片复制到 ./static/uploads 文件夹下, 打完标再 mv 移走\n2. 下载到本地:\n3. git clone '[email protected]:miaoweihong/label_code.git'\n4. 
cd label_code\n\n```python web_server.py ```\n\n打开浏览器: http://192.168.0.170:8088/ 按照说明, 进行标注\n\n----\n\n![标注示例](label_code/demo_img.jpg 'demo example') \n\n\n----\n>> by- [email protected]\n>> @2017.06\n" }, { "alpha_fraction": 0.5592625141143799, "alphanum_fraction": 0.5645303130149841, "avg_line_length": 32.5, "blob_id": "b069b586e405182d0a66254bb924e125369e9dc8", "content_id": "e56bb126227fb692beafcd860f85c8a8ed1a8c6f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1151, "license_type": "no_license", "max_line_length": 97, "num_lines": 34, "path": "/label_code/apicode.py", "repo_name": "jkmiao/label_code", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n\nimport os\nimport random \n\nclass Apicode(object):\n\n def __init__(self, path='./static/uploads'):\n totalImg = [os.path.join(path, fname) for fname in os.listdir(path)]\n finished = [fname for fname in totalImg if fname.find('_')>1 ]\n self.unfinished = [fname for fname in totalImg if fname not in finished]\n \n self.label_msg = {}\n self.label_msg['city'] = '探迹'\n self.label_msg['totalCnt'] = len(totalImg)\n self.label_msg['finishedCnt'] = len(finished)\n\n\n def get_labelmsg(self, fname='', result=''):\n\n if fname in self.unfinished and len(result)>1:\n newName = './static/uploads/%s_%s.jpg' % (result, fname.split('/')[-1].split('.')[0])\n if os.path.exists(fname):\n os.rename(fname, newName) # 更新名字\n self.unfinished.remove(fname)\n self.label_msg['finishedCnt'] += 1\n \n if self.unfinished:\n self.label_msg['fname'] = random.choice(self.unfinished)\n else:\n self.label_msg['fname'] = 'none'\n\n return self.label_msg\n" } ]
3
mmk-uk/HandShape-Recognition-with-Python-Leapmotion
https://github.com/mmk-uk/HandShape-Recognition-with-Python-Leapmotion
21c2184e5febb5a5b5a46cd5afcf9c644edffbfe
1dace8782592e0840d3d472aa9cc18feda848d37
07739718b6ad0b39cdc3ff23a6a0a84e798a9645
refs/heads/master
2020-06-06T08:46:56.593653
2019-06-19T08:44:42
2019-06-19T08:44:42
192,692,964
2
3
null
null
null
null
null
[ { "alpha_fraction": 0.5869497060775757, "alphanum_fraction": 0.6253512501716614, "avg_line_length": 26.852174758911133, "blob_id": "8868b134a4e00ae1ac38cf1682e036b6f700883b", "content_id": "1e0e914823f0dabdd1ca5329aef14414aea6c570", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4111, "license_type": "no_license", "max_line_length": 85, "num_lines": 115, "path": "/binary.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2\nimport numpy as np\nfrom scipy import stats\n\n# 手の認識用パラメータ(HチャンネルとSチャンネルとを二値化するための条件)\nhmin = 0\nhmax = 30 # 15-40程度にセット\nsmin = 50\n\n# グー、チョキ、パーのファイルの個数を格納する変数\ngu_file_count = 0\nchoki_file_count = 0\npa_file_count = 0\n\n# 二値化された画像を保存するための関数(自分の手の画像で学習したい方のみ用いる)\ndef save_hand(mode, img):\n global gu_file_count\n global choki_file_count\n global pa_file_count\n\n if mode == 'g':\n filename = 'img_gu{0:03d}.png'.format(gu_file_count)\n print('saving {0}'.format(filename))\n cv2.imwrite(filename, img)\n gu_file_count += 1\n elif mode == 'c':\n filename = 'img_choki{0:03d}.png'.format(choki_file_count)\n print('saving {0}'.format(filename))\n cv2.imwrite(filename, img)\n choki_file_count += 1\n elif mode == 'p':\n filename = 'img_pa{0:03d}.png'.format(pa_file_count)\n print('saving {0}'.format(filename))\n cv2.imwrite(filename, img)\n pa_file_count += 1\n\ncap = cv2.VideoCapture(0)\n\n# カメラの解像度を320x240にセット\ncap.set(3, 320) # 横サイズ\ncap.set(4, 240) # 縦サイズ\n# カメラのフレームレートを15fpsにセット\ncap.set(cv2.CAP_PROP_FPS, 15)\n\n\nwhile True:\n ret, frame = cap.read()\n if ret == False:\n break\n #frame <type 'numpy.ndarray'>\n\n # 映像データをHSV形式に変換\n hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)\n\n # HSV形式からHチャンネルとSチャンネルの画像を得る\n hsv_channels = cv2.split(hsv)\n h_channel = hsv_channels[0]\n s_channel = hsv_channels[1]\n\n # Hチャンネルを平滑化\n h_binary = cv2.GaussianBlur(h_channel, (5,5), 0)\n # Hチャンネルの二値化画像を作成\n # hmin~hmaxの範囲を255(白)に、それ以外を0(黒)に\n ret,h_binary = cv2.threshold(h_binary, hmax, 255, cv2.THRESH_TOZERO_INV)\n ret,h_binary = cv2.threshold(h_binary, hmin, 255, cv2.THRESH_BINARY)\n # Sチャンネルの二値化画像を作成\n # smin~255の範囲を255(白)に、それ以外を0に(黒)に\n ret,s_binary = cv2.threshold(s_channel, smin, 255, cv2.THRESH_BINARY)\n\n # HチャンネルとSチャンネルの二値化画像のANDをとる\n # HチャンネルとSチャンネルの両方で255(白)の領域のみ白となる\n hs_and = h_binary & s_binary\n\n # 以下、最も広い白領域のみを残すための計算\n # まず、白領域の塊(クラスター)にラベルを振る\n img_dist, img_label = cv2.distanceTransformWithLabels(255-hs_and, cv2.DIST_L2, 5)\n img_label = np.uint8(img_label) & hs_and\n # ラベル0は黒領域なので除外\n img_label_not_zero = img_label[img_label != 0]\n # 最も多く現れたラベルが最も広い白領域のラベル\n if len(img_label_not_zero) != 0:\n m = stats.mode(img_label_not_zero)[0]\n else:\n m = 0\n # 最も広い白領域のみを残す\n hand = np.uint8(img_label == m)*255\n\n # 表示して動作チェックするため h_channel, s_channel, h_binary, s_binary を結合\n hs = np.concatenate((h_channel, h_binary), axis=0)\n hs_bin = np.concatenate((s_channel, s_binary), axis=0)\n hs_final = np.concatenate((hs_and, hand), axis=0)\n hs_all = np.concatenate((hs, hs_bin, hs_final), axis=1)\n\n # 得られた二値化画像を画面に表示\n cv2.imshow('hand', hand)\n \n # 動作チェック用の画像を画面に表示\n #cv2.imshow('HS', hs_all)\n #cv2.imshow('frame', stream.array)\n\n # 'q'を入力でアプリケーション終了\n # 'g', 'c', 'p'のときは画像保存\n key = cv2.waitKey(1)\n if key & 0xFF == ord('q'):\n break\n elif key & 0xFF == ord('g'):\n save_hand('g', hand)\n elif key & 0xFF == ord('c'):\n save_hand('c', hand)\n elif key & 0xFF == ord('p'):\n save_hand('p', 
hand)\n\ncap.release()\ncv2.destroyAllWindows()\n" }, { "alpha_fraction": 0.5568070411682129, "alphanum_fraction": 0.5804766416549683, "avg_line_length": 34.410404205322266, "blob_id": "abca36ac62c1c5c94e3865adc7cdbf18cbaa6bee", "content_id": "dcaa3630a28040aafcf476eeb4e7a1cb16f8ff6e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6944, "license_type": "no_license", "max_line_length": 97, "num_lines": 173, "path": "/LeapImageRecognition.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2, Leap, math, ctypes\nimport sys\nimport numpy as np\nfrom scipy import stats\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.externals import joblib\nimport time\n\n# 学習に用いる縮小画像のサイズ\nsw = 48\nsh = 36\n\n# 手の認識用パラメータ(HチャンネルとSチャンネルとを二値化するための条件)\nhmin = 0\nhmax = 30 # 15-40程度にセット\nsmin = 50\n\njanken_class = ['グー', 'チョキ', 'パー']\n\n# 学習済ファイルの確認\nif len(sys.argv)==2:\n savefile = sys.argv[1]\n try:\n clf = joblib.load(savefile)\n except IOError:\n print('学習済ファイル{0}を開けません'.format(savefile))\n sys.exit()\nelse:\n print('使用法: python ml-08-04-recognition.py 学習済ファイル.pkl')\n sys.exit()\n\ndef getImageVector(img):\n # 白い領域(ピクセル値が0でない領域)の座標を集める\n nonzero = cv2.findNonZero(img)\n # その領域を囲う四角形の座標と大きさを取得\n xx, yy, ww, hh = cv2.boundingRect(nonzero)\n # 白い領域を含む最小の矩形領域を取得\n img_nonzero = img[yy:yy+hh, xx:xx+ww]\n cv2.imshow('img_nonzero', img_nonzero)\n # 白い領域を(sw, sh)サイズに縮小するための準備\n img_small = np.zeros((sh, sw), dtype=np.uint8)\n # 画像のアスペクト比を保ったまま、白い領域を縮小してimg_smallにコピーする\n if 4*hh < ww*3 and hh > 0:\n htmp = int(sw*hh/ww)\n if htmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (sw, htmp), interpolation=cv2.INTER_LINEAR)\n img_small[(sh-htmp)//2:(sh-htmp)//2+htmp, 0:sw] = img_small_tmp\n elif 4*hh >= ww*3 and ww > 0:\n wtmp = int(sh*ww/hh)\n if wtmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (wtmp, sh), interpolation=cv2.INTER_LINEAR)\n img_small[0:sh, (sw-wtmp)//2:(sw-wtmp)//2+wtmp] = img_small_tmp\n # 0...1の範囲にスケーリングしてからリターンする\n cv2.imshow('img_small', img_small)\n return np.array([img_small.ravel()/255.])\n\ndef convert_distortion_maps(image):\n\n distortion_length = image.distortion_width * image.distortion_height\n xmap = np.zeros(distortion_length/2, dtype=np.float32)\n ymap = np.zeros(distortion_length/2, dtype=np.float32)\n\n for i in range(0, distortion_length, 2):\n xmap[distortion_length/2 - i/2 - 1] = image.distortion[i] * image.width\n ymap[distortion_length/2 - i/2 - 1] = image.distortion[i + 1] * image.height\n\n xmap = np.reshape(xmap, (image.distortion_height, image.distortion_width/2))\n ymap = np.reshape(ymap, (image.distortion_height, image.distortion_width/2))\n\n #resize the distortion map to equal desired destination image size\n resized_xmap = cv2.resize(xmap,\n (image.width, image.height),\n 0, 0,\n cv2.INTER_LINEAR)\n resized_ymap = cv2.resize(ymap,\n (image.width, image.height),\n 0, 0,\n cv2.INTER_LINEAR)\n\n #Use faster fixed point maps\n coordinate_map, interpolation_coefficients = cv2.convertMaps(resized_xmap,\n resized_ymap,\n cv2.CV_32FC1,\n nninterpolation = False)\n\n return coordinate_map, interpolation_coefficients\n\ndef undistort(image, coordinate_map, coefficient_map, width, height):\n destination = np.empty((width, height), dtype = np.ubyte)\n\n #wrap image data in numpy array\n i_address = int(image.data_pointer)\n ctype_array_def = ctypes.c_ubyte * image.height * image.width\n # as ctypes array\n as_ctype_array = 
ctype_array_def.from_address(i_address)\n # as numpy array\n as_numpy_array = np.ctypeslib.as_array(as_ctype_array)\n img = np.reshape(as_numpy_array, (image.height, image.width))\n\n #remap image to destination\n destination = cv2.remap(img,\n coordinate_map,\n coefficient_map,\n interpolation = cv2.INTER_LINEAR)\n\n #resize output to desired destination size\n destination = cv2.resize(destination,\n (width, height),\n 0, 0,\n cv2.INTER_LINEAR)\n return destination\n\ndef run(controller):\n maps_initialized = False\n while(True):\n #sleepで更新速度を制御\n time.sleep(0.1)\n frame = controller.frame()\n image = frame.images[0]\n if image.is_valid:\n if not maps_initialized:\n left_coordinates, left_coefficients = convert_distortion_maps(frame.images[0])\n right_coordinates, right_coefficients = convert_distortion_maps(frame.images[1])\n maps_initialized = True\n\n undistorted_left = undistort(image, left_coordinates, left_coefficients, 400, 400)\n undistorted_right = undistort(image, right_coordinates, right_coefficients, 400, 400)\n\n #画像を2値化(白黒に処理)\n ret,hand = cv2.threshold(undistorted_right,80,255,cv2.THRESH_BINARY)\n\n my_hand = hand[80:320,40:360]\n\n # 以下、最も広い白領域のみを残すための計算\n # まず、白領域の塊(クラスター)にラベルを振る\n img_dist, img_label = cv2.distanceTransformWithLabels(255-my_hand, cv2.DIST_L2, 5)\n img_label = np.uint8(img_label) & my_hand\n # ラベル0は黒領域なので除外\n img_label_not_zero = img_label[img_label != 0]\n # 最も多く現れたラベルが最も広い白領域のラベル\n if len(img_label_not_zero) != 0:\n m = stats.mode(img_label_not_zero)[0]\n else:\n m = 0\n # 最も広い白領域のみを残す\n this_hand = np.uint8(img_label == m)*255\n\n # 最大の白領域からscikit-learnに入力するためのベクトルを取得\n hand_vector = getImageVector(this_hand)\n\n # 学習済のニューラルネットワークから分類結果を取得\n result = clf.predict(hand_vector)\n # 分類結果を表示\n print(janken_class[result[0]])\n\n # 得られた二値化画像を画面に表示\n cv2.imshow('hand', this_hand)\n\n if cv2.waitKey(1) & 0xFF == ord('q'):\n break\n\ndef main():\n controller = Leap.Controller()\n controller.set_policy_flags(Leap.Controller.POLICY_IMAGES)\n print('認識を開始します')\n try:\n run(controller)\n except KeyboardInterrupt:\n sys.exit(0)\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.5283592939376831, "alphanum_fraction": 0.5555555820465088, "avg_line_length": 36.763511657714844, "blob_id": "5bf2f86b602fac4e98cec39b62c794030922c2b0", "content_id": "ca32e14ae276bd77584896621915c97ad15e84ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5911, "license_type": "no_license", "max_line_length": 102, "num_lines": 148, "path": "/LeapImageGet2.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2, Leap, math, ctypes\nimport numpy as np\nimport time\nfrom scipy import stats\n\ndef convert_distortion_maps(image):\n\n distortion_length = image.distortion_width * image.distortion_height\n xmap = np.zeros(distortion_length/2, dtype=np.float32)\n ymap = np.zeros(distortion_length/2, dtype=np.float32)\n\n for i in range(0, distortion_length, 2):\n xmap[distortion_length/2 - i/2 - 1] = image.distortion[i] * image.width\n ymap[distortion_length/2 - i/2 - 1] = image.distortion[i + 1] * image.height\n\n xmap = np.reshape(xmap, (image.distortion_height, image.distortion_width/2))\n ymap = np.reshape(ymap, (image.distortion_height, image.distortion_width/2))\n\n #resize the distortion map to equal desired destination image size\n resized_xmap = cv2.resize(xmap,\n (image.width, image.height),\n 0, 0,\n cv2.INTER_LINEAR)\n 
resized_ymap = cv2.resize(ymap,\n (image.width, image.height),\n 0, 0,\n cv2.INTER_LINEAR)\n\n #Use faster fixed point maps\n coordinate_map, interpolation_coefficients = cv2.convertMaps(resized_xmap,\n resized_ymap,\n cv2.CV_32FC1,\n nninterpolation = False)\n\n return coordinate_map, interpolation_coefficients\n\ndef undistort(image, coordinate_map, coefficient_map, width, height):\n destination = np.empty((width, height), dtype = np.ubyte)\n\n #wrap image data in numpy array\n i_address = int(image.data_pointer)\n ctype_array_def = ctypes.c_ubyte * image.height * image.width\n # as ctypes array\n as_ctype_array = ctype_array_def.from_address(i_address)\n # as numpy array\n as_numpy_array = np.ctypeslib.as_array(as_ctype_array)\n img = np.reshape(as_numpy_array, (image.height, image.width))\n\n #remap image to destination\n destination = cv2.remap(img,\n coordinate_map,\n coefficient_map,\n interpolation = cv2.INTER_LINEAR)\n\n #resize output to desired destination size\n destination = cv2.resize(destination,\n (width, height),\n 0, 0,\n cv2.INTER_LINEAR)\n return destination\n\ndef run(controller):\n maps_initialized = False\n count = 308\n while(True):\n #sleepで更新速度を制御\n time.sleep(0.4)\n frame = controller.frame()\n image = frame.images[0]\n if image.is_valid:\n if not maps_initialized:\n left_coordinates, left_coefficients = convert_distortion_maps(frame.images[0])\n right_coordinates, right_coefficients = convert_distortion_maps(frame.images[1])\n maps_initialized = True\n\n undistorted_left = undistort(image, left_coordinates, left_coefficients, 400, 400)\n undistorted_right = undistort(image, right_coordinates, right_coefficients, 400, 400)\n\n #画像を2値化(白黒に処理)\n ret,hand = cv2.threshold(undistorted_right,70,255,cv2.THRESH_BINARY)\n\n my_hand = hand[80:320,40:360]\n\n # 以下、最も広い白領域のみを残すための計算\n # まず、白領域の塊(クラスター)にラベルを振る\n img_dist, img_label = cv2.distanceTransformWithLabels(255-my_hand, cv2.DIST_L2, 5)\n img_label = np.uint8(img_label) & my_hand\n # ラベル0は黒領域なので除外\n img_label_not_zero = img_label[img_label != 0]\n # 最も多く現れたラベルが最も広い白領域のラベル\n if len(img_label_not_zero) != 0:\n m = stats.mode(img_label_not_zero)[0]\n else:\n m = 0\n # 最も広い白領域のみを残す\n this_hand = np.uint8(img_label == m)*255\n\n #膨張\n #kernel = np.ones((5,5),np.uint8)\n #this_hand = cv2.dilate(this_hand,kernel,iterations = 1)\n\n #hand_color = cv2.cvtColor(this_hand,cv2.COLOR_GRAY2BGR)\n\n # 輪郭を抽出\n contours,hierarchy = cv2.findContours(this_hand,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)\n cnt = contours[0]\n\n #重心を求める\n mu = cv2.moments(this_hand, False)\n mx,my= int(mu[\"m10\"]/mu[\"m00\"]) , int(mu[\"m01\"]/mu[\"m00\"])\n\n #手首の位置を求める\n frame = controller.frame()\n righthand = frame.hands.rightmost\n arm = righthand.arm\n\n i_box = frame.interaction_box\n normalized_tip = i_box.normalize_point(arm.wrist_position)\n app_x = 160 * normalized_tip.x + 80\n app_y = 120 * (normalized_tip.z) + 60\n app = (int(app_x),int(app_y))\n\n #重心と手首の位置から回転させる\n angle = 90 + math.degrees(math.atan2(my-app[1],mx-app[0]))\n trans = cv2.getRotationMatrix2D((mx,my), angle , 1.0)\n this_hand = cv2.warpAffine(this_hand, trans, (360,240))\n\n\n # 得られた二値化画像を画面に表示\n cv2.imshow('hand', this_hand)\n #cv2.imshow('hand', undistorted_right)\n cv2.imwrite(\"HandImages2/img_two{0:03d}.png\".format(count),this_hand)\n count += 1\n\n if cv2.waitKey(1) & 0xFF == ord('q'):\n break\n\ndef main():\n controller = Leap.Controller()\n controller.set_policy_flags(Leap.Controller.POLICY_IMAGES)\n try:\n run(controller)\n except KeyboardInterrupt:\n 
sys.exit(0)\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.5798634886741638, "alphanum_fraction": 0.6184300184249878, "avg_line_length": 26.641510009765625, "blob_id": "b99dd40148b119a67215c9fe8ab85052ced3e823", "content_id": "e06f5c843cd880e804af1a268c5d87ebe03bedc4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3626, "license_type": "no_license", "max_line_length": 95, "num_lines": 106, "path": "/hand-learn4.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2\nimport sys\nimport numpy as np\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.externals import joblib\nimport pylab as plt\n\n# 学習に用いる縮小画像のサイズ\nsw = 160\nsh = 120\n\n# 学習結果を保存するファイルの決定\nif len(sys.argv)!=2:\n print('使用法: python ml-08-03-learn.py 保存ファイル名.pkl')\n sys.exit()\nsavefile = sys.argv[1]\n\ndef getImageVector(img):\n # 白い領域(ピクセル値が0でない領域)の座標を集める\n nonzero = cv2.findNonZero(img)\n # その領域を囲う四角形の座標と大きさを取得\n xx, yy, ww, hh = cv2.boundingRect(nonzero)\n # 白い領域を含む最小の矩形領域を取得\n img_nonzero = img[yy:yy+hh, xx:xx+ww]\n # 白い領域を(sw, sh)サイズに縮小するための準備\n img_small = np.zeros((sh, sw), dtype=np.uint8)\n # 画像のアスペクト比を保ったまま、白い領域を縮小してimg_smallにコピーする\n if 4*hh < ww*3 and hh > 0:\n htmp = int(sw*hh/ww)\n if htmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (sw, htmp), interpolation=cv2.INTER_LINEAR)\n img_small[(sh-htmp)//2:(sh-htmp)//2+htmp, 0:sw] = img_small_tmp\n elif 4*hh >= ww*3 and ww > 0:\n wtmp = int(sh*ww/hh)\n if wtmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (wtmp, sh), interpolation=cv2.INTER_LINEAR)\n img_small[0:sh, (sw-wtmp)//2:(sw-wtmp)//2+wtmp] = img_small_tmp\n # 0...1の範囲にスケーリングしてからリターンする\n return np.array([img_small.ravel()/255.])\n\n# X:画像から計算したベクトル、y:教師データ\nX = np.empty((0,sw*sh), float)\ny = np.array([], int)\n\n# 手の画像の読み込み\nfor hand_class in [0, 1, 2, 3, 4, 5]:\n\n # 画像番号0から999まで対応\n for i in range(1000):\n if hand_class==0:\n filename = 'HandImages2/img_zero{0:03d}.png'.format(i)\n elif hand_class==1:\n filename = 'HandImages2/img_one{0:03d}.png'.format(i)\n elif hand_class==2:\n filename = 'HandImages2/img_two{0:03d}.png'.format(i)\n elif hand_class==3:\n filename = 'HandImages2/img_three{0:03d}.png'.format(i)\n elif hand_class==4:\n filename = 'HandImages2/img_four{0:03d}.png'.format(i)\n elif hand_class==5:\n filename = 'HandImages2/img_five{0:03d}.png'.format(i)\n\n img = cv2.imread(filename, cv2.IMREAD_GRAYSCALE)\n if img is None:\n continue\n print('{0}を読み込んでいます'.format(filename))\n\n\n #膨張\n kernel = np.ones((5,5),np.uint8)\n img = cv2.dilate(img,kernel,iterations = 1)\n\n # 画像から、学習用ベクトルの取得\n img_vector = getImageVector(img)\n\n\n\n # 学習用データの格納\n if img_vector.size > 0:\n X = np.append(X, img_vector, axis=0)\n y = np.append(y, hand_class)\n\n\n# ニューラルネットワークによる画像の学習\nclf = MLPClassifier(hidden_layer_sizes=(100,100), max_iter=300, tol=0.0001, random_state=None)\n\nprint('学習中…')\nclf.fit(X, y)\n\n# 学習結果のファイルへの書き出し\njoblib.dump(clf, savefile)\nprint('学習結果はファイル {0} に保存されました'.format(savefile))\n\n# 損失関数のグラフの軸ラベルを設定\nplt.xlabel('time step')\nplt.ylabel('loss')\n\n# グラフ縦軸の範囲を0以上と定める\nplt.ylim(0, max(clf.loss_curve_))\n\n# 損失関数の時間変化を描画\nplt.plot(clf.loss_curve_)\n\n# 描画したグラフを表示\nplt.show()\n" }, { "alpha_fraction": 0.5071839094161987, "alphanum_fraction": 0.5429597496986389, "avg_line_length": 37.241756439208984, "blob_id": "0106c4652943c2a0d7d6e15ed0ef062bd39f44eb", "content_id": "6f034469fc5e581f3def9c19630c61af5b6eb660", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7304, "license_type": "no_license", "max_line_length": 103, "num_lines": 182, "path": "/LeapImageBinary.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2, Leap, math, ctypes\nimport numpy as np\nimport time\nfrom scipy import stats\n\ndef convert_distortion_maps(image):\n\n distortion_length = image.distortion_width * image.distortion_height\n xmap = np.zeros(distortion_length/2, dtype=np.float32)\n ymap = np.zeros(distortion_length/2, dtype=np.float32)\n\n for i in range(0, distortion_length, 2):\n xmap[distortion_length/2 - i/2 - 1] = image.distortion[i] * image.width\n ymap[distortion_length/2 - i/2 - 1] = image.distortion[i + 1] * image.height\n\n xmap = np.reshape(xmap, (image.distortion_height, image.distortion_width/2))\n ymap = np.reshape(ymap, (image.distortion_height, image.distortion_width/2))\n\n #resize the distortion map to equal desired destination image size\n resized_xmap = cv2.resize(xmap,\n (image.width, image.height),\n 0, 0,\n cv2.INTER_LINEAR)\n resized_ymap = cv2.resize(ymap,\n (image.width, image.height),\n 0, 0,\n cv2.INTER_LINEAR)\n\n #Use faster fixed point maps\n coordinate_map, interpolation_coefficients = cv2.convertMaps(resized_xmap,\n resized_ymap,\n cv2.CV_32FC1,\n nninterpolation = False)\n\n return coordinate_map, interpolation_coefficients\n\ndef undistort(image, coordinate_map, coefficient_map, width, height):\n destination = np.empty((width, height), dtype = np.ubyte)\n\n #wrap image data in numpy array\n i_address = int(image.data_pointer)\n ctype_array_def = ctypes.c_ubyte * image.height * image.width\n # as ctypes array\n as_ctype_array = ctype_array_def.from_address(i_address)\n # as numpy array\n as_numpy_array = np.ctypeslib.as_array(as_ctype_array)\n img = np.reshape(as_numpy_array, (image.height, image.width))\n\n #remap image to destination\n destination = cv2.remap(img,\n coordinate_map,\n coefficient_map,\n interpolation = cv2.INTER_LINEAR)\n\n #resize output to desired destination size\n destination = cv2.resize(destination,\n (width, height),\n 0, 0,\n cv2.INTER_LINEAR)\n return destination\n\ndef run(controller):\n maps_initialized = False\n while(True):\n #sleepで更新速度を制御\n time.sleep(0.1)\n frame = controller.frame()\n image = frame.images[0]\n if image.is_valid:\n if not maps_initialized:\n left_coordinates, left_coefficients = convert_distortion_maps(frame.images[0])\n right_coordinates, right_coefficients = convert_distortion_maps(frame.images[1])\n maps_initialized = True\n\n undistorted_left = undistort(image, left_coordinates, left_coefficients, 400, 400)\n undistorted_right = undistort(image, right_coordinates, right_coefficients, 400, 400)\n\n hand = undistorted_right[80:320,40:360]\n\n #hand_color = cv2.cvtColor(hand,cv2.COLOR_GRAY2BGR)\n\n #hand_gray = cv2.cvtColor(hand,cv2.COLOR_BGR2GRAY)\n\n #画像を2値化(白黒に処理)\n ret,my_hand = cv2.threshold(hand,70,255,cv2.THRESH_BINARY)\n\n # 以下、最も広い白領域のみを残すための計算\n # まず、白領域の塊(クラスター)にラベルを振る\n img_dist, img_label = cv2.distanceTransformWithLabels(255-my_hand, cv2.DIST_L2, 5)\n img_label = np.uint8(img_label) & my_hand\n # ラベル0は黒領域なので除外\n img_label_not_zero = img_label[img_label != 0]\n # 最も多く現れたラベルが最も広い白領域のラベル\n if len(img_label_not_zero) != 0:\n m = stats.mode(img_label_not_zero)[0]\n else:\n m = 0\n # 最も広い白領域のみを残す\n this_hand = np.uint8(img_label == m)*255\n\n #膨張\n kernel = np.ones((5,5),np.uint8)\n 
this_hand = cv2.dilate(this_hand,kernel,iterations = 1)\n\n hand_color = cv2.cvtColor(this_hand,cv2.COLOR_GRAY2BGR)\n\n # 輪郭を抽出\n contours,hierarchy = cv2.findContours(this_hand,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)\n cnt = contours[0]\n #hand_color = cv2.drawContours(hand_color, [cnt], 0, (0,255,0), 3)\n #hand_color = cv2.drawContours(hand_color, contours[0::4], 0, (0,255,0), 3)\n\n #hull = cv2.convexHull(cnt,returnPoints = False)\n #defects = cv2.convexityDefects(cnt,hull)\n\n #for i in range(defects.shape[0]):\n # s,e,f,d = defects[i,0]\n # start = tuple(cnt[s][0])\n # end = tuple(cnt[e][0])\n # far = tuple(cnt[f][0])\n # cv2.line(hand_color,start,end,[0,255,0],2)\n # cv2.circle(hand_color,far,5,[0,0,255],-1)\n\n #for p in cnt[0::6]:\n #point = tuple(p[0])\n #cv2.circle(hand_color,point,2,[0,0,255],-1)\n\n #重心を求めて表示\n mu = cv2.moments(this_hand, False)\n mx,my= int(mu[\"m10\"]/mu[\"m00\"]) , int(mu[\"m01\"]/mu[\"m00\"])\n cv2.circle(hand_color, (mx,my),2,[0,0,255],-1)\n\n #手首の位置を求める\n frame = controller.frame()\n righthand = frame.hands.rightmost\n arm = righthand.arm\n\n i_box = frame.interaction_box\n normalized_tip = i_box.normalize_point(arm.wrist_position)\n app_x = 160 * normalized_tip.x + 80\n app_y = 120 * (normalized_tip.z) + 60\n app = (int(app_x),int(app_y))\n\n cv2.circle(hand_color,app,2,[0,255,0],-1)\n\n #cv2.imshow('hand', hand_color)\n\n #重心と手首の位置から回転させる\n angle = 90 + math.degrees(math.atan2(my-app[1],mx-app[0]))\n print(angle)\n trans = cv2.getRotationMatrix2D((mx,my), angle , 1.0)\n hand_color = cv2.warpAffine(hand_color, trans, (360,240))\n\n #cv2.circle(hand_color,app,2,[255,0,0],-1)\n\n #手首以下を黒にする\n rx = int((app[0]-mx)*math.cos(angle)-(app[1]-my)*math.sin(angle) + mx)\n ry = int((app[0]-mx)*math.sin(angle)+(app[1]-my)*math.cos(angle) + my)\n #cv2.circle(hand_color, (rx,ry),2,[255,0,0],-1)\n print(ry)\n #hand_color = cv2.rectangle(hand_color, (0, ry), (360, 240), color=(0, 0, 0), thickness=-1)\n\n\n # 得られた二値化画像を画面に表示\n cv2.imshow('hand', hand_color)\n #cv2.imshow('hand', undistorted_right)\n\n\n if cv2.waitKey(1) & 0xFF == ord('q'):\n break\n\ndef main():\n controller = Leap.Controller()\n controller.set_policy_flags(Leap.Controller.POLICY_IMAGES)\n try:\n run(controller)\n except KeyboardInterrupt:\n sys.exit(0)\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.5733896493911743, "alphanum_fraction": 0.6451953649520874, "avg_line_length": 22.674999237060547, "blob_id": "2cd03e1a4d32809e71161e9d3cac45ecf6bda96a", "content_id": "5b17f7357d78b04f3153251e715841c1d88bed7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 967, "license_type": "no_license", "max_line_length": 83, "num_lines": 40, "path": "/shape_hand.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2\nimport numpy as np\n\nimg = cv2.imread('hand-learn/img_five010.png')\nimg_gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\nret, thresh = cv2.threshold(img_gray, 127, 255,0)\ncontours,hierarchy = cv2.findContours(thresh,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)\n\ncnt = contours[0]\n\n#img = cv2.drawContours(img, [cnt], 0, (0,255,0), 3)\n\nimg_gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\n\n#重心を求めて表示\n#mu = cv2.moments(img_gray, False)\n#mx,my= int(mu[\"m10\"]/mu[\"m00\"]) , int(mu[\"m01\"]/mu[\"m00\"])\n#cv2.circle(img, (mx,my), 4, 100, 2, 4)\n\n#hand_point = []\n\n#for p in cnt:\n# point = tuple(p[0])\n# if my > point[1]:\n# 
hand_point.append(point)\n\n#for i in range(len(hand_point) - 1):\n# start = hand_point[i]\n# end = hand_point[i+1]\n# cv2.line(img,start,end,[0,255,0],2)\n\n#膨張\nkernel = np.ones((5,5),np.uint8)\nimg = cv2.dilate(img,kernel,iterations = 1)\n\n\ncv2.imshow('img',img)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n" }, { "alpha_fraction": 0.5346447229385376, "alphanum_fraction": 0.5629351139068604, "avg_line_length": 33.53816604614258, "blob_id": "4c0d2e88f6eba7d69af16724cca1f95685ede927", "content_id": "9ed9f1007ddfdfa270543d24bd3af1fd85ad7482", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9907, "license_type": "no_license", "max_line_length": 102, "num_lines": 262, "path": "/HandRecognitionCheck.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2, Leap, math, ctypes\nimport sys\nimport numpy as np\nfrom scipy import stats\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.externals import joblib\nimport time\nfrom decimal import Decimal\nimport matplotlib.pyplot as plt\n\n# 学習に用いる縮小画像のサイズ\nsw = 160\nsh = 120\n\n# 手の認識用パラメータ(HチャンネルとSチャンネルとを二値化するための条件)\nhmin = 0\nhmax = 30 # 15-40程度にセット\nsmin = 50\n\nhand_class = ['0', '1', '2', '3', '4', '5']\n\n#表示の設定\nfig = plt.figure(figsize=(8,8))\ncheckbar = fig.add_subplot(2,2,1)\nrecog_image = fig.add_subplot(2,2,4)\nrecog_image.tick_params(bottom=False,\n left=False,\n right=False,\n top=False)\nrecog_image.tick_params(labelbottom=False,\n labelleft=False,\n labelright=False,\n labeltop=False)\nprediction_num = fig.add_subplot(2,2,3)\nprediction_num.tick_params(bottom=False,\n left=False,\n right=False,\n top=False)\nprediction_num.tick_params(labelbottom=False,\n labelleft=False,\n labelright=False,\n labeltop=False)\n#result_num = fig.add_subplot(2,2,2)\n#result_num.tick_params(bottom=False,\n# left=False,\n# right=False,\n# top=False)\n#result_num.tick_params(labelbottom=False,\n# labelleft=False,\n# labelright=False,\n# labeltop=False)\n\n# 学習済ファイルの確認\nif len(sys.argv)==2:\n savefile = sys.argv[1]\n try:\n clf = joblib.load(savefile)\n except IOError:\n print('学習済ファイル{0}を開けません'.format(savefile))\n sys.exit()\nelse:\n print('使用法: python ml-08-04-recognition.py 学習済ファイル.pkl')\n sys.exit()\n\ndef getImageVector(img):\n # 白い領域(ピクセル値が0でない領域)の座標を集める\n nonzero = cv2.findNonZero(img)\n # その領域を囲う四角形の座標と大きさを取得\n xx, yy, ww, hh = cv2.boundingRect(nonzero)\n # 白い領域を含む最小の矩形領域を取得\n img_nonzero = img[yy:yy+hh, xx:xx+ww]\n #cv2.imshow('img_nonzero', img_nonzero)\n # 白い領域を(sw, sh)サイズに縮小するための準備\n img_small = np.zeros((sh, sw), dtype=np.uint8)\n # 画像のアスペクト比を保ったまま、白い領域を縮小してimg_smallにコピーする\n if 4*hh < ww*3 and hh > 0:\n htmp = int(sw*hh/ww)\n if htmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (sw, htmp), interpolation=cv2.INTER_LINEAR)\n img_small[(sh-htmp)//2:(sh-htmp)//2+htmp, 0:sw] = img_small_tmp\n elif 4*hh >= ww*3 and ww > 0:\n wtmp = int(sh*ww/hh)\n if wtmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (wtmp, sh), interpolation=cv2.INTER_LINEAR)\n img_small[0:sh, (sw-wtmp)//2:(sw-wtmp)//2+wtmp] = img_small_tmp\n # 0...1の範囲にスケーリングしてからリターンする\n #cv2.imshow('img_small', img_small)\n return np.array([img_small.ravel()/255.])\n\ndef convert_distortion_maps(image):\n\n distortion_length = image.distortion_width * image.distortion_height\n xmap = np.zeros(distortion_length/2, dtype=np.float32)\n ymap = np.zeros(distortion_length/2, dtype=np.float32)\n\n for i in range(0, distortion_length, 2):\n 
xmap[distortion_length/2 - i/2 - 1] = image.distortion[i] * image.width\n ymap[distortion_length/2 - i/2 - 1] = image.distortion[i + 1] * image.height\n\n xmap = np.reshape(xmap, (image.distortion_height, image.distortion_width/2))\n ymap = np.reshape(ymap, (image.distortion_height, image.distortion_width/2))\n\n #resize the distortion map to equal desired destination image size\n resized_xmap = cv2.resize(xmap,\n (image.width, image.height),\n 0, 0,\n cv2.INTER_LINEAR)\n resized_ymap = cv2.resize(ymap,\n (image.width, image.height),\n 0, 0,\n cv2.INTER_LINEAR)\n\n #Use faster fixed point maps\n coordinate_map, interpolation_coefficients = cv2.convertMaps(resized_xmap,\n resized_ymap,\n cv2.CV_32FC1,\n nninterpolation = False)\n\n return coordinate_map, interpolation_coefficients\n\ndef undistort(image, coordinate_map, coefficient_map, width, height):\n destination = np.empty((width, height), dtype = np.ubyte)\n\n #wrap image data in numpy array\n i_address = int(image.data_pointer)\n ctype_array_def = ctypes.c_ubyte * image.height * image.width\n # as ctypes array\n as_ctype_array = ctype_array_def.from_address(i_address)\n # as numpy array\n as_numpy_array = np.ctypeslib.as_array(as_ctype_array)\n img = np.reshape(as_numpy_array, (image.height, image.width))\n\n #remap image to destination\n destination = cv2.remap(img,\n coordinate_map,\n coefficient_map,\n interpolation = cv2.INTER_LINEAR)\n\n #resize output to desired destination size\n destination = cv2.resize(destination,\n (width, height),\n 0, 0,\n cv2.INTER_LINEAR)\n return destination\n\ndef run(controller):\n maps_initialized = False\n while(True):\n #sleepで更新速度を制御\n #time.sleep(0.01)\n frame = controller.frame()\n image = frame.images[0]\n if image.is_valid:\n if not maps_initialized:\n left_coordinates, left_coefficients = convert_distortion_maps(frame.images[0])\n right_coordinates, right_coefficients = convert_distortion_maps(frame.images[1])\n maps_initialized = True\n\n undistorted_left = undistort(image, left_coordinates, left_coefficients, 400, 400)\n undistorted_right = undistort(image, right_coordinates, right_coefficients, 400, 400)\n\n #画像を2値化(白黒に処理)\n ret,hand = cv2.threshold(undistorted_right,90,255,cv2.THRESH_BINARY)\n\n my_hand = hand[80:320,40:360]\n\n # 以下、最も広い白領域のみを残すための計算\n # まず、白領域の塊(クラスター)にラベルを振る\n img_dist, img_label = cv2.distanceTransformWithLabels(255-my_hand, cv2.DIST_L2, 5)\n img_label = np.uint8(img_label) & my_hand\n # ラベル0は黒領域なので除外\n img_label_not_zero = img_label[img_label != 0]\n # 最も多く現れたラベルが最も広い白領域のラベル\n if len(img_label_not_zero) != 0:\n m = stats.mode(img_label_not_zero)[0]\n else:\n m = 0\n # 最も広い白領域のみを残す\n this_hand = np.uint8(img_label == m)*255\n\n #膨張\n kernel = np.ones((4,4),np.uint8)\n #this_hand = cv2.erode(this_hand,kernel,iterations = 1)\n this_hand = cv2.dilate(this_hand,kernel,iterations = 1)\n\n # 輪郭を抽出\n contours,hierarchy = cv2.findContours(this_hand,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)\n cnt = contours[0]\n\n #重心を求める\n mu = cv2.moments(this_hand, False)\n mx,my= int(mu[\"m10\"]/mu[\"m00\"]) , int(mu[\"m01\"]/mu[\"m00\"])\n\n #手首の位置を求める\n frame = controller.frame()\n righthand = frame.hands.rightmost\n arm = righthand.arm\n\n i_box = frame.interaction_box\n normalized_tip = i_box.normalize_point(arm.wrist_position)\n app_x = 160 * normalized_tip.x + 80\n app_y = 120 * (normalized_tip.z) + 60\n app = (int(app_x),int(app_y))\n\n #重心と手首の位置から回転させる\n angle = 90 + math.degrees(math.atan2(my-app[1],mx-app[0]))\n trans = cv2.getRotationMatrix2D((mx,my), angle , 1.0)\n this_hand 
= cv2.warpAffine(this_hand, trans, (360,240))\n\n\n # 最大の白領域からscikit-learnに入力するためのベクトルを取得\n hand_vector = getImageVector(this_hand)\n\n # 学習済のニューラルネットワークから分類結果を取得\n result = clf.predict(hand_vector)\n # 分類結果を表示\n #print(hand_class[result[0]])\n\n pp = clf.predict_proba(hand_vector)[0]\n hc = [0,1,2,3,4,5]\n\n recog_image.cla()\n checkbar.cla()\n prediction_num.cla()\n #result_num.cla()\n\n checkbar.bar(hc,pp)\n checkbar.set_xticks(hc,hand_class)\n checkbar.set_ylim([0,1])\n\n this_hand = cv2.cvtColor(this_hand, cv2.COLOR_GRAY2RGB)\n recog_image.imshow(this_hand)\n\n prediction_num.text(0.3,0.3,str(hand_class[result[0]]),size=100)\n\n #if pp[int(hand_class[result[0]])] > 0.9:\n # result_num.text(0.3,0.3,str(hand_class[result[0]]),size=100)\n #else:\n # result_num.text(0.3,0.3,\" \",size=100)\n\n plt.draw()\n\n plt.pause(0.001)\n\n\n #if cv2.waitKey(1) & 0xFF == ord('q'):\n if 0xFF == ord('q'):\n break\n\ndef main():\n controller = Leap.Controller()\n controller.set_policy_flags(Leap.Controller.POLICY_IMAGES)\n\n print('認識を開始します')\n try:\n run(controller)\n except KeyboardInterrupt:\n sys.exit(0)\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.6056379675865173, "alphanum_fraction": 0.6448071002960205, "avg_line_length": 25.535432815551758, "blob_id": "e9667f622be84cbdf5e9959fcd6a41fb9974fdca", "content_id": "d10fc930ddaa9e4d97320f460813da6c29783575", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4558, "license_type": "no_license", "max_line_length": 95, "num_lines": 127, "path": "/recognition.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2\nimport sys\nimport numpy as np\nfrom scipy import stats\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.externals import joblib\n\n# 学習に用いる縮小画像のサイズ\nsw = 16\nsh = 12\n\n# 手の認識用パラメータ(HチャンネルとSチャンネルとを二値化するための条件)\nhmin = 0\nhmax = 30 # 15-40程度にセット\nsmin = 50\n\njanken_class = ['グー', 'チョキ', 'パー']\n\n# 学習済ファイルの確認\nif len(sys.argv)==2:\n savefile = sys.argv[1]\n try:\n clf = joblib.load(savefile)\n except IOError:\n print('学習済ファイル{0}を開けません'.format(savefile))\n sys.exit()\nelse:\n print('使用法: python ml-08-04-recognition.py 学習済ファイル.pkl')\n sys.exit()\n\ndef getImageVector(img):\n # 白い領域(ピクセル値が0でない領域)の座標を集める\n nonzero = cv2.findNonZero(img)\n # その領域を囲う四角形の座標と大きさを取得\n xx, yy, ww, hh = cv2.boundingRect(nonzero)\n # 白い領域を含む最小の矩形領域を取得\n img_nonzero = img[yy:yy+hh, xx:xx+ww]\n\n # 白い領域を(sw, sh)サイズに縮小するための準備\n img_small = np.zeros((sh, sw), dtype=np.uint8)\n\n # 画像のアスペクト比を保ったまま、白い領域を縮小してimg_smallにコピーする\n if 4*hh < ww*3 and hh > 0:\n htmp = int(sw*hh/ww)\n if htmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (sw, htmp), interpolation=cv2.INTER_LINEAR)\n img_small[(sh-htmp)//2:(sh-htmp)//2+htmp, 0:sw] = img_small_tmp\n elif 4*hh >= ww*3 and ww > 0:\n wtmp = int(sh*ww/hh)\n if wtmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (wtmp, sh), interpolation=cv2.INTER_LINEAR)\n img_small[0:sh, (sw-wtmp)//2:(sw-wtmp)//2+wtmp] = img_small_tmp\n\n # 0...1の範囲にスケーリングしてからリターンする\n return np.array([img_small.ravel()/255.])\n\nprint('認識を開始します')\n\ncap = cv2.VideoCapture(0)\n\n# カメラの解像度を320x240にセット\ncap.set(3, 320) # 横サイズ\ncap.set(4, 240) # 縦サイズ\n# カメラのフレームレートを15fpsにセット\ncap.set(cv2.CAP_PROP_FPS, 15)\n\nwhile True:\n ret, frame = cap.read()\n if ret == False:\n break\n\n # 映像データをHSV形式に変換\n hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)\n # HSV形式からHチャンネルとSチャンネルの画像を得る\n hsv_channels = cv2.split(hsv)\n 
h_channel = hsv_channels[0]\n s_channel = hsv_channels[1]\n\n # Hチャンネルを平滑化\n h_binary = cv2.GaussianBlur(h_channel, (5,5), 0)\n\n # Hチャンネルの二値化画像を作成\n # hmin~hmaxの範囲を255(白)に、それ以外を0(黒)に\n ret,h_binary = cv2.threshold(h_binary, hmax, 255, cv2.THRESH_TOZERO_INV)\n ret,h_binary = cv2.threshold(h_binary, hmin, 255, cv2.THRESH_BINARY)\n # Sチャンネルの二値化画像を作成\n # smin~255の範囲を255(白)に、それ以外を0に(黒)に\n ret,s_binary = cv2.threshold(s_channel, smin, 255, cv2.THRESH_BINARY)\n\n # HチャンネルとSチャンネルの二値化画像のANDをとる\n # HチャンネルとSチャンネルの両方で255(白)の領域のみ白となる\n hs_and = h_binary & s_binary\n\n # 以下、最も広い白領域のみを残すための計算\n # まず、白領域の塊(クラスター)にラベルを振る\n img_dist, img_label = cv2.distanceTransformWithLabels(255-hs_and, cv2.DIST_L2, 5)\n img_label = np.uint8(img_label) & hs_and\n # ラベル0は黒領域なので除外\n img_label_not_zero = img_label[img_label != 0]\n # 最も多く現れたラベルが最も広い白領域のラベル\n if len(img_label_not_zero) != 0:\n m = stats.mode(img_label_not_zero)[0]\n else:\n m = 0\n # 最も広い白領域のみを残す\n hand = np.uint8(img_label == m)*255\n\n # 最大の白領域からscikit-learnに入力するためのベクトルを取得\n hand_vector = getImageVector(hand)\n\n\n # 学習済のニューラルネットワークから分類結果を取得\n result = clf.predict(hand_vector)\n # 分類結果を表示\n print(janken_class[result[0]])\n\n # 得られた二値化画像を画面に表示\n cv2.imshow('hand', hand)\n\n # 'q'を入力でアプリケーション終了\n key = cv2.waitKey(1)\n if key & 0xFF == ord('q'):\n break\n\ncap.release()\ncv2.destroyAllWindows()\n" }, { "alpha_fraction": 0.5782003998756409, "alphanum_fraction": 0.6111268997192383, "avg_line_length": 31.32110023498535, "blob_id": "f05e630fbd967113eeee87661aca97d23567dec3", "content_id": "1a53ac0b3fa240f6cf786d3c92a0d75aa203daae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4249, "license_type": "no_license", "max_line_length": 95, "num_lines": 109, "path": "/ModelEvaluation.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2, Leap, math, ctypes\nimport sys\nimport numpy as np\nfrom scipy import stats\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.externals import joblib\nfrom sklearn.metrics import classification_report\nfrom decimal import Decimal\nimport matplotlib.pyplot as plt\n\n# 学習に用いる縮小画像のサイズ\nsw = 160\nsh = 120\n\n# 学習済ファイルの確認\nif len(sys.argv)==2:\n savefile = sys.argv[1]\n try:\n clf = joblib.load(savefile)\n except IOError:\n print('学習済ファイル{0}を開けません'.format(savefile))\n sys.exit()\nelse:\n print('使用法: python ml-08-04-recognition.py 学習済ファイル.pkl')\n sys.exit()\n\ndef getImageVector(img):\n # 白い領域(ピクセル値が0でない領域)の座標を集める\n nonzero = cv2.findNonZero(img)\n # その領域を囲う四角形の座標と大きさを取得\n xx, yy, ww, hh = cv2.boundingRect(nonzero)\n # 白い領域を含む最小の矩形領域を取得\n img_nonzero = img[yy:yy+hh, xx:xx+ww]\n # 白い領域を(sw, sh)サイズに縮小するための準備\n img_small = np.zeros((sh, sw), dtype=np.uint8)\n # 画像のアスペクト比を保ったまま、白い領域を縮小してimg_smallにコピーする\n if 4*hh < ww*3 and hh > 0:\n htmp = int(sw*hh/ww)\n if htmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (sw, htmp), interpolation=cv2.INTER_LINEAR)\n img_small[(sh-htmp)//2:(sh-htmp)//2+htmp, 0:sw] = img_small_tmp\n elif 4*hh >= ww*3 and ww > 0:\n wtmp = int(sh*ww/hh)\n if wtmp>0:\n img_small_tmp = cv2.resize(img_nonzero, (wtmp, sh), interpolation=cv2.INTER_LINEAR)\n img_small[0:sh, (sw-wtmp)//2:(sw-wtmp)//2+wtmp] = img_small_tmp\n # 0...1の範囲にスケーリングしてからリターンする\n return np.array([img_small.ravel()/255.])\n\n# X:画像から計算したベクトル、y:正解データ\ntestX = np.empty((0,sw*sh), float)\ntesty = np.array([], int)\n\n# 手の画像の読み込み\nfor hand_class in [0, 1, 2, 3, 4, 5]:\n\n # 画像番号0から999まで対応\n for i in 
range(1000):\n if hand_class==0:\n filename = 'hand-learn5/img_zero{0:03d}.png'.format(i)\n elif hand_class==1:\n filename = 'hand-learn5/img_one{0:03d}.png'.format(i)\n elif hand_class==2:\n filename = 'hand-learn5/img_two{0:03d}.png'.format(i)\n elif hand_class==3:\n filename = 'hand-learn5/img_three{0:03d}.png'.format(i)\n elif hand_class==4:\n filename = 'hand-learn5/img_four{0:03d}.png'.format(i)\n elif hand_class==5:\n filename = 'hand-learn5/img_five{0:03d}.png'.format(i)\n\n my_hand = cv2.imread(filename, cv2.IMREAD_GRAYSCALE)\n if my_hand is None:\n continue\n #print('{0}を読み込んでいます'.format(filename))\n\n # 以下、最も広い白領域のみを残すための計算\n # まず、白領域の塊(クラスター)にラベルを振る\n img_dist, img_label = cv2.distanceTransformWithLabels(255-my_hand, cv2.DIST_L2, 5)\n img_label = np.uint8(img_label) & my_hand\n # ラベル0は黒領域なので除外\n img_label_not_zero = img_label[img_label != 0]\n # 最も多く現れたラベルが最も広い白領域のラベル\n if len(img_label_not_zero) != 0:\n m = stats.mode(img_label_not_zero)[0]\n else:\n m = 0\n # 最も広い白領域のみを残す\n this_hand = np.uint8(img_label == m)*255\n\n #膨張\n #kernel = np.ones((4,4),np.uint8)\n #this_hand = cv2.erode(this_hand,kernel,iterations = 1)\n #this_hand = cv2.dilate(this_hand,kernel,iterations = 1)\n\n # 画像から、学習用ベクトルの取得\n img_vector = getImageVector(this_hand)\n # 学習用データの格納\n if img_vector.size > 0:\n testX = np.append(testX, img_vector, axis=0)\n testy = np.append(testy, hand_class)\n\n\nprint('評価中…')\n# 学習済のニューラルネットワークから分類結果を取得\nresult = clf.predict(testX)\n\nprint classification_report(testy, result)\n" }, { "alpha_fraction": 0.5033025145530701, "alphanum_fraction": 0.574636697769165, "avg_line_length": 36.849998474121094, "blob_id": "c7e4f2604c7c21f0a24541fd94d95064a2f36eb1", "content_id": "c8c434c7269935df7673ee596cc90a16a6407c05", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1570, "license_type": "no_license", "max_line_length": 78, "num_lines": 40, "path": "/DataRename.py", "repo_name": "mmk-uk/HandShape-Recognition-with-Python-Leapmotion", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport cv2\nimport sys\nimport numpy as np\n\n# 手の画像の読み込み\nfor hand_class in [0, 1, 2, 3, 4, 5]:\n\n # 画像番号0から999まで対応\n for i in range(900):\n if hand_class==0:\n filename = 'hand-learn2/img_zero{0:03d}.png'.format(i)\n elif hand_class==1:\n filename = 'hand-learn2/img_one{0:03d}.png'.format(i)\n elif hand_class==2:\n filename = 'hand-learn2/img_two{0:03d}.png'.format(i)\n elif hand_class==3:\n filename = 'hand-learn2/img_three{0:03d}.png'.format(i)\n elif hand_class==4:\n filename = 'hand-learn2/img_four{0:03d}.png'.format(i)\n elif hand_class==5:\n filename = 'hand-learn2/img_five{0:03d}.png'.format(i)\n\n img = cv2.imread(filename, cv2.IMREAD_GRAYSCALE)\n if img is None:\n continue\n print('{0}を読み込んでいます'.format(filename))\n\n if hand_class==0:\n cv2.imwrite('hand-learn22/img_zero{0:03d}.png'.format(i+100),img)\n elif hand_class==1:\n cv2.imwrite('hand-learn22/img_one{0:03d}.png'.format(i+100),img)\n elif hand_class==2:\n cv2.imwrite('hand-learn22/img_two{0:03d}.png'.format(i+100),img)\n elif hand_class==3:\n cv2.imwrite('hand-learn22/img_three{0:03d}.png'.format(i+100),img)\n elif hand_class==4:\n cv2.imwrite('hand-learn22/img_four{0:03d}.png'.format(i+100),img)\n elif hand_class==5:\n cv2.imwrite('hand-learn22/img_five{0:03d}.png'.format(i+100),img)\n" } ]
10
abdo-fysal/gesture-recognition
https://github.com/abdo-fysal/gesture-recognition
118f29f60ee9339c621444ac2a9aa3d96d8adca7
40772977ead05aa1e6a58450a3d7537f877e83dd
ba93be85e5f5f031884e6f3ca5a7a018b887ce10
refs/heads/master
2021-08-14T10:36:24.283082
2017-11-15T11:40:16
2017-11-15T11:40:16
110,823,212
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5929054021835327, "alphanum_fraction": 0.6385135054588318, "avg_line_length": 27.238094329833984, "blob_id": "613743c5f5b195f993b01a925b01514b5ce94a5d", "content_id": "928831e55b97f4949b5f7fe760d2044edecfc601", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 592, "license_type": "no_license", "max_line_length": 61, "num_lines": 21, "path": "/hand recognition.py", "repo_name": "abdo-fysal/gesture-recognition", "src_encoding": "UTF-8", "text": "import numpy as np\nimport cv2\ncap = cv2.VideoCapture(0)\n# take first f`rame of the video\nret,image = cap.read()\nhand_classifier=cv2.CascadeClassifier('r.xml')\nwhile(ret):\n ret ,image = cap.read()\n\n gray=cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)\n hands=hand_classifier.detectMultiScale(gray,1.3,5)\n if hands is ():\n print(\"NO hands found\")\n for(x,y,w,h) in hands:\n im=cv2.rectangle(image,(x,y),(x+w,y+h),(127,0,255),2)\n print(hands)\n cv2.imshow('hand detection',image)\n k = cv2.waitKey(60) & 0xff\n if k == 27:\n break\ncv2.destroyAllWindows()" } ]
1
jamestut/simple-web-server
https://github.com/jamestut/simple-web-server
12c7e63a59a9b0d7eb36a377e7621ef81c5459eb
bd7d0757d7972d565bc2fdbd520589ca3533fbf7
94f28dc0eee4f47ef6801c623fbd1961507374e9
refs/heads/master
2020-05-23T05:03:37.135438
2019-05-14T15:02:06
2019-05-14T15:02:06
186,644,739
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5252594947814941, "alphanum_fraction": 0.542837381362915, "avg_line_length": 31.549549102783203, "blob_id": "40d73aa996a0dd36ad69a6121b28369d15b70a2e", "content_id": "0e7c91d17540ffe3bf95e553fc5a893f6e8ca895", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7225, "license_type": "permissive", "max_line_length": 114, "num_lines": 222, "path": "/src/simplewebserver.py", "repo_name": "jamestut/simple-web-server", "src_encoding": "UTF-8", "text": "import uvicorn\nimport argparse\nimport os\nimport collections\nimport multipart_stream\n\nargs = None\n\nEMPTY_RESPONSE = {\n 'type': 'http.response.body',\n 'body': b''\n}\n\ndef gen_header(status=200, content_type=None, custom_headers=[]):\n obj = {\n 'type': 'http.response.start',\n 'status': status,\n 'headers': []\n }\n if content_type is not None:\n obj['headers'].extend([\n [b'content-type', content_type.encode('UTF-8')],\n [b'Access-Control-Allow-Origin', b'*']\n ])\n obj['headers'].extend(custom_headers)\n return obj\n\n\ndef gen_abs_path(scope, path):\n # default host value if not set in header\n host = scope[\"server\"][0] + (':' + str(scope[\"server\"][1]) if scope[\"server\"][1] != 80 else \"\")\n # look for host header\n for header in scope[\"headers\"]:\n if header[0].decode() == \"host\":\n host = header[1].decode()\n break\n\n return f'{scope[\"scheme\"]}://{host}{path}'\n\ndef gen_text_response(message):\n return {\n 'type': 'http.response.body',\n 'body': message.encode('UTF-8')\n }\n\n\ndef human_size(size):\n fmtstr = \"{0:.2f} {1}\"\n if size > 1000000000:\n unit = \"GB\"\n szf = size / 1000000000\n elif size > 1000000:\n unit = \"MB\"\n szf = size / 1000000\n elif size > 1000:\n unit = \"kB\"\n szf = size / 1000\n else:\n unit = \"B\"\n szf = size\n fmtstr = \"{0:.0f} {1}\"\n\n return fmtstr.format(szf, unit)\n\n\ndef create_dir_list_page(scdiriter, pwd):\n template_begin = \"\"\"\n <!doctype html>\n <html>\n <head><title>Directory Listing</title></head>\n <body>\n <table>\n <tr>\n <th align=\"left\">Name</th>\n <th align=\"left\">Size</th>\n </tr>\n \"\"\"\n template_end = \"\"\"\n </table>\n <hr>\n <form method=\"POST\" enctype=\"multipart/form-data\">\n <input type=\"file\" name=\"data\" />\n <input type=\"submit\"/>\n </form>\n </body>\n </html>\n \"\"\"\n\n dirs = collections.OrderedDict()\n files = collections.OrderedDict()\n\n for f in scdiriter:\n if f.is_dir():\n dirs[f.name] = (f.path[1:],)\n else:\n try:\n size = f.stat().st_size\n except:\n size = -1\n files[f.name] = (f.path[1:], size,)\n pass\n scdiriter.close()\n\n # we use list for quicker concat. 
this is akin to stringbuilder.\n retarr = [template_begin]\n # go up!\n if pwd != '/':\n parentpath = os.path.normpath(os.path.join(pwd, '..'))\n retarr.append(f'<tr><td><a href=\"{parentpath}\">Up One Level</a></td><td></td></tr>')\n # directories first, then files\n for dir, md in dirs.items():\n retarr.append(f'<tr><td><a href=\"{md[0]}\">{dir}</a></td><td>(dir)</td></tr>')\n for file, md in files.items():\n retarr.append(f'<tr><td><a href=\"{md[0]}\">{file}</a></td><td>{human_size(md[1])}</td></tr>')\n retarr.append(template_end)\n\n return \"\".join(retarr)\n\n\nasync def download_file(path, send):\n buffsz = 262144\n buff = b''\n try:\n with open(path, 'rb') as fh:\n filesize = os.fstat(fh.fileno()).st_size\n aux_header = []\n if filesize > 0:\n aux_header.append([b'content-length', str(filesize).encode('UTF-8')])\n await send(gen_header(200, \"application/octet-stream\", aux_header))\n while True:\n buff = fh.read(buffsz)\n if len(buff) <= 0:\n await send(EMPTY_RESPONSE)\n break\n await send({\n 'type': 'http.response.body',\n 'body': buff,\n 'more_body': True\n })\n except PermissionError as ex:\n await send(gen_header(403))\n await send(gen_text_response(str(ex)))\n except Exception as ex:\n await send(gen_header(500))\n await send(gen_text_response(str(ex)))\n\n\nasync def upload_file(canopath, path, scope, send, receive):\n try:\n with multipart_stream.MultipartStream(scope, path, b'data') as msreader:\n more_body = True\n while more_body:\n msg = await receive()\n # debug\n body = msg.get('body', b'')\n print(f\"Received chunk size: {len(body)} bytes.\")\n msreader.add_chunk(body)\n more_body = msg.get('more_body', False)\n # tell the client to refresh the page using GET!\n redir_hdr = [[b'location', gen_abs_path(scope, canopath).encode('UTF-8')]]\n await send(gen_header(303, custom_headers=redir_hdr))\n await send(EMPTY_RESPONSE)\n except PermissionError as ex:\n await send(gen_header(403))\n await send(gen_text_response(str(\"Writing to this resource is not allowed.\")))\n except FileNotFoundError as ex:\n await send(gen_header(404))\n await send(gen_text_response(\"Parent directory not found.\"))\n except Exception as ex:\n await send(gen_header(500))\n await send(gen_text_response(str(ex)))\n\n\nasync def app(scope, receive, send):\n assert scope['type'] == 'http'\n canopath = os.path.normpath(scope['path'])\n if scope['path'] != canopath:\n await send(gen_header(301, custom_headers=[[b'location', gen_abs_path(scope, canopath).encode('UTF-8')]]))\n await send(EMPTY_RESPONSE)\n return\n\n path = '.' 
+ canopath\n\n if scope['method'] in ('HEAD', 'GET'):\n if os.path.isdir(path):\n try:\n scan_iter = os.scandir(path)\n dirlist = gen_text_response(create_dir_list_page(scan_iter, canopath))\n await send(gen_header(200, \"text/html\"))\n await send(dirlist)\n except PermissionError as ex:\n await send(gen_header(403))\n await send(gen_text_response(\"Access to this resource is not allowed.\"))\n return\n except Exception as ex:\n await send(gen_header(500))\n await send(gen_text_response(str(ex)))\n return\n elif os.path.isfile(path):\n await download_file(path, send)\n else:\n await send(gen_header(404))\n await send(gen_text_response(\"Requested resource not found.\"))\n elif scope['method'] == 'POST':\n if os.path.isdir(path):\n await upload_file(canopath, path, scope, send, receive)\n else:\n await send(gen_header(405))\n await send(gen_text_response(\"Must POST inside a directory\"))\n else:\n await send(gen_header(405))\n await send(EMPTY_RESPONSE)\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(description=\"A simple static web server.\")\n parser.add_argument(\"--port\", type=int, default=8080, help=\"Port number to listen to.\")\n parser.add_argument(\"--cwd\", type=str, default='.', help=\"Base directory.\")\n parser.add_argument(\"--bind\", type=str, default='0.0.0.0', help=\"IP address to bind to.\")\n args = parser.parse_args()\n os.chdir(args.cwd)\n uvicorn.run(app, host=args.bind, port=args.port, log_level=\"info\")" }, { "alpha_fraction": 0.5396341681480408, "alphanum_fraction": 0.5487805008888245, "avg_line_length": 34.875, "blob_id": "4114568a51c48cf99d6269cf3d6ef829af419b19", "content_id": "11663cc2419612c771a48e452b855d4dd6a23dd8", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2296, "license_type": "permissive", "max_line_length": 90, "num_lines": 64, "path": "/src/memoryviewbyteutils.py", "repo_name": "jamestut/simple-web-server", "src_encoding": "UTF-8", "text": "class MemoryViewWrapper():\n def __init__(self, ba, slice = None):\n if type(ba) == bytes:\n self.obj = memoryview(ba)\n self._slice = (0, len(ba))\n if slice is not None:\n raise ValueError(\"Cannot assign slice here.\")\n elif type(ba) == memoryview:\n self.obj = ba\n self._slice = slice\n else:\n raise TypeError(\"Expected a bytes object.\")\n\n def __len__(self):\n return self._slice[1] - self._slice[0]\n\n def __eq__(self, other):\n return self.obj == other\n\n def __ne__(self, other):\n return self.obj != other\n\n def __getitem__(self, index):\n # simple index access\n if type(index) == int:\n return self.obj[index]\n\n if index.step != None:\n raise ValueError(\"Step slicing is not supported.\")\n\n # determine the positive version of the given indices\n def normalize_index(idx):\n ret = idx if idx >= 0 else len(self) + idx\n # bound from 0 to length of data\n return max(0, min(len(self), ret))\n\n start = 0 if index.start is None else normalize_index(index.start)\n stop = len(self) if index.stop is None else normalize_index(index.stop)\n subview = self.obj[start:stop]\n return MemoryViewWrapper(subview, (start + self._slice[0], stop + self._slice[0]))\n\n def _what_i_see(self):\n # for debug purpose only\n return self.obj.obj[self._slice[0]:self._slice[1]]\n\n def find(self, substr, start=None, end=None):\n if (start is not None and start < 0) or (end is not None and end < 0):\n raise ValueError(\"Bounding indices must not be a negative.\")\n result = self.obj.obj.find(substr,\n self._slice[0] + (0 if start 
is None else start),\n self._slice[1] - (0 if end is None else end))\n return result if result < 0 else result - self._slice[0]\n\n def split(self, delim):\n ret = []\n start = 0\n while True:\n newstart = self.find(delim, start)\n if newstart < 0: break\n ret.append(self[start:newstart])\n start = newstart + len(delim)\n # latest item\n ret.append(self[start:])\n return ret\n" }, { "alpha_fraction": 0.4895056188106537, "alphanum_fraction": 0.4955734610557556, "avg_line_length": 47.565216064453125, "blob_id": "cdadc87db0c31d393bf35a6b347830d5af305da6", "content_id": "908aba6b20608e0e7c424860a15f5b46ff1036d2", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10053, "license_type": "permissive", "max_line_length": 155, "num_lines": 207, "path": "/src/multipart_stream.py", "repo_name": "jamestut/simple-web-server", "src_encoding": "UTF-8", "text": "from enum import Enum, auto\nimport re\nimport os\nimport time\nfrom memoryviewbyteutils import MemoryViewWrapper\n\n\nclass _States(Enum):\n READY = auto(),\n CD_HDR = auto(),\n DATA = auto(),\n DATA_END = auto(),\n FINISHED = auto()\n\n\nclass MultipartStream:\n def __init__(self, scope, path, look_for):\n # const\n self._CRLF = b'\\r\\n'\n # initial variables\n self._boundary = None\n self._old_chunk = None\n self._cd_str = None\n self._cd_name = None\n self._cd_filename = None\n self._saved_data_chunk = None\n self._data_end_marker = None\n self._fh = None\n self._path = path\n # this is the field that we're looking after\n if not hasattr(look_for, 'decode'): raise TypeError(\"Expected bytes-like object for look_for.\")\n self._look_for = look_for\n # initial parsing of header data\n self._parse_header_content(scope)\n # initial states\n self._state = _States.READY\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n if self._fh is not None:\n self._fh.close()\n self._fh = None\n\n def _parse_header_content(self, scope):\n for tpl in scope['headers']:\n if tpl[0] == b'content-type':\n ct_params = MemoryViewWrapper(tpl[1]).split(b'; ')\n if ct_params[0].obj != b'multipart/form-data':\n raise ValueError(\"Invalid content type.\")\n for ct_param in ct_params:\n ct_tpl = ct_param.split(b'=')\n if len(ct_tpl) != 2: continue\n if ct_tpl[0].obj == b'boundary':\n self._boundary = b'--' + ct_tpl[1].obj.tobytes()\n break\n break\n if self._boundary is None:\n raise ValueError(\"Header content-type not found or valid.\")\n\n def _find_boundary_end(self, newchunk, start_from = 0, boundary_str = True, pre_crlf = 0, post_crlf = 0):\n \"\"\":return Index of the character after the last character of the first occurrence of the boundary string,\n or -1 if the boundary string is not found\"\"\"\n boundary = b''.join([self._CRLF for i in range(pre_crlf)] +\n [self._boundary if boundary_str else b''] +\n [self._CRLF for i in range(post_crlf)])\n\n if start_from == 0:\n if self._old_chunk is not None:\n # first, we perform a check to ensure that the boundary that we are after isn't chunked\n # this assumes that each chunk is larger that the boundary string\n # find last letter that matches the boundary string\n last_letter = boundary[-1]\n for i in range(min(len(newchunk), len(boundary)) - 1, -1, -1):\n if newchunk[i] == last_letter:\n newchunk_portion = newchunk[:i+1]\n if boundary.endswith(newchunk_portion.obj):\n # we join the old chunk with the new one, and ensure that it ends with the boundary string\n if (self._old_chunk.obj.obj + 
bytes(newchunk_portion)).endswith(boundary):\n # then this is the boundary!\n return i + 1\n # let's find a regular occurrence\n idx = newchunk.find(boundary, start_from)\n return -1 if idx < 0 else idx + len(boundary)\n\n\n def _parse_cd_str(self):\n \"\"\"Parse the complete content-disposition subheader stored in _cd_str\"\"\"\n ret = {'field_name':None, 'file_name':None}\n lines = self._cd_str.split(b'\\r\\n')\n for line in lines:\n splt = line.split(b\": \")\n if len(splt) >= 2 and splt[0] == b'Content-Disposition':\n splt = splt[1].split(b\"; \")\n if len(splt) >= 2 and splt[0] == b'form-data':\n for i in range(1, len(splt)):\n splt2 = splt[i].split(b'=')\n if splt2[0] == b'name' and len(splt2) == 2:\n # always assume quoted string\n ret['field_name'] = splt2[1][1:-1].obj.tobytes()\n elif splt2[0] == b'filename' and len(splt2) == 2:\n # always assume quoted string\n ret['file_name'] = splt2[1][1:-1].obj.tobytes()\n # We've already found the data we need. No point to proceed any further.\n if ret['field_name'] is not None:\n # default timestamp based file name if not specified by client\n if ret['file_name'] is None or len(ret['file_name'].strip()) == 0:\n ret['file_name'] = f\"upload-{int(time.time())}.dat\".encode('UTF-8')\n break\n return ret\n\n def add_chunk(self, chunk):\n if self._state == _States.FINISHED:\n # no more data can't be processed. please create a new instance\n return False\n if len(chunk) == 0:\n return True\n\n mv_chunk = MemoryViewWrapper(chunk)\n start_idx = 0\n\n while True:\n if self._state == _States.READY:\n # look for the boundary, so we can determine the content-disposition header\n start_idx = self._find_boundary_end(mv_chunk, start_idx, True, 0, 1)\n if start_idx >= 0:\n self._cd_str = b''\n self._state = _States.CD_HDR\n else:\n # proceed on the next chunk\n break\n elif self._state == _States.CD_HDR:\n # here, we will parse the content-disposition (sub)header\n prev_start_idx = start_idx\n boundary_size = 4\n start_idx = self._find_boundary_end(mv_chunk, start_idx, False, 2)\n if start_idx >= 0:\n self._cd_str += bytes(mv_chunk[prev_start_idx:max(0, start_idx - boundary_size)])\n self._cd_str = MemoryViewWrapper(self._cd_str)\n # ensure that we don't include a part of the boundary if the boundary is truncated\n if start_idx < boundary_size: self._cd_str = self._cd_str[:start_idx - boundary_size]\n hdr_parse = self._parse_cd_str()\n # be ready to open the file for writing if we encountered the correct form field\n self._fh = open(os.path.join(self._path, hdr_parse['file_name'].decode()), 'wb') if hdr_parse['field_name'] == self._look_for else None\n self._state = _States.DATA\n self._saved_data_chunk = None\n else:\n self._cd_str += bytes(mv_chunk[prev_start_idx:])\n # proceed to the next chunk\n break\n elif self._state == _States.DATA:\n # a boundary is the end of data\n prev_start_idx = start_idx\n start_idx = self._find_boundary_end(mv_chunk, start_idx, True, 1)\n # begin processing and writing only if _fh is assigned\n if self._fh is not None:\n boundary_size = len(self._boundary) + 2\n if start_idx < 0:\n # this means that we couldn't find an exact match for the boundary\n # if there is a saved chunk, it is certain that the chunk contains only data, no boundary\n if self._saved_data_chunk is not None:\n self._fh.write(self._saved_data_chunk.obj)\n self._saved_data_chunk = None\n # we save this chunk first, in case the last part of the chunk contains a part of the boundary\n self._saved_data_chunk = mv_chunk[prev_start_idx:]\n else:\n # write the 
previous chunk if exist\n if self._saved_data_chunk is not None:\n # ensure that we don't include the partial boundary data stored here\n if start_idx < boundary_size:\n self._saved_data_chunk = self._saved_data_chunk[:start_idx - boundary_size]\n self._fh.write(self._saved_data_chunk.obj)\n self._saved_data_chunk = None\n # data from current (last!) chunk\n if start_idx > boundary_size:\n self._fh.write(mv_chunk[prev_start_idx:start_idx - boundary_size].obj)\n if start_idx >= 0:\n if self._fh is not None:\n self._fh.close()\n self._fh = None\n self._data_end_marker = bytearray()\n self._state = _States.DATA_END\n else:\n # proceed to the next chunk\n break\n elif self._state == _States.DATA_END:\n ctr = 0\n while start_idx < len(mv_chunk):\n self._data_end_marker.append(mv_chunk[start_idx])\n start_idx += 1\n if len(self._data_end_marker) == 2 and self._data_end_marker == b'\\r\\n':\n # we still have more fields to process\n self._cd_str = b''\n self._state = _States.CD_HDR\n # proceed to the next iteration in this chunk\n break\n elif len(self._data_end_marker) == 4:\n if self._data_end_marker == b'--\\r\\n':\n # done\n return True\n else:\n # consider the data invalid\n return False\n\n # save the old chunk for truncated boundary\n self._old_chunk = mv_chunk\n" }, { "alpha_fraction": 0.7777777910232544, "alphanum_fraction": 0.7854785323143005, "avg_line_length": 66.37036895751953, "blob_id": "8548e063abad282f65b397e5df3f2f98d40600d0", "content_id": "53e53e247cd7c6fc9b32c860c9833cd5cecfb633", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1818, "license_type": "permissive", "max_line_length": 268, "num_lines": 27, "path": "/README.md", "repo_name": "jamestut/simple-web-server", "src_encoding": "UTF-8", "text": "# Simple Uvicorn Based Web Server\n## Overview\nThis simple HTTP 1.1 static web server is written in Python 3 utilizing the Uvicorn library for the HTTP server component.\n\nThis program requires CPython 3.5 or newer and uvicorn (e.g. `pip3 install uvicorn` if on Ubuntu). Only tested in Linux systems (including Microsoft WSL).\n\n## Features\n- Directory listing and file download.\n- File download, with `Content-Length` header and CORS header (suitable for hosting static JSON files).\n- File upload, utilizing `POST` method and `multipart/form-data` body format.\n- Built-in `multipart/form-data` that works with chunked data streams.\n- Streams both upload and download, suitable for transferring large files.\n- More than 4GB file support.\n- Simultaneous transfer in both directions.\n- Path canonicalization.\n\n## Known Issues\n- File names with non ASCII unicode characters doesn't work.\n\nPlease note that this application is a very simple program, therefore no extensive performance and security testing has been performed. There isn't any guarantee of protection against security breach, denial of service attacks, etc.\n\n## Usage\nSimply run the `simplewebserver.py` from the `src` folder using Python 3 interpreter. Ensure that the other python files in the `src` folder from this repository accompany the main python program.\n\nAnother way to run this program is by renaming `simplewebserver.py` to `__main__.py`, zip archive the program with the other python files from the `src` folder, and then execute the zip file directly using the Python 3 interpreter (e.g. `python3 simplewebserver.zip`).\n\nBy default, the program will serve files and folders in the current working directory for all hosts at port 8080. 
Use the command line switch to change those parameters. For more info, run the program with `--help` argument." } ]
4
Zahidsqldba07/codinggame-4
https://github.com/Zahidsqldba07/codinggame-4
e6e187ee3ff1851837663f40a574f5a35cb5feb2
213c1bdf416f2b962049e2415fe922b979040507
be80dce87733697b2c9b8b3ea011237c323685dc
refs/heads/master
2023-03-18T22:46:42.853451
2018-09-16T12:07:24
2018-09-16T12:07:24
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4374034106731415, "alphanum_fraction": 0.4374034106731415, "avg_line_length": 23.884614944458008, "blob_id": "7b825e275e528d0be2a81a2b07ed9bbcbccefaf6", "content_id": "14af429f5c54dbdabf3257ed54ef5a156199985d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 647, "license_type": "no_license", "max_line_length": 75, "num_lines": 26, "path": "/helpers/imports.py", "repo_name": "Zahidsqldba07/codinggame-4", "src_encoding": "UTF-8", "text": "import sys\nimport math\n\n\ndef debug(*args, **kwargs):\n parts = [\n \"{}\".format(arg) for arg in args\n ] + [\n \"{}={}\".format(key, value) for key, value in kwargs.items()\n ]\n print(\"DEBUG: \" + \" -- \".join(parts), file=sys.stderr)\n\n\nclass O:\n def __init__(self, **kwargs):\n for key, value in kwargs.items():\n setattr(self, key, value)\n\n def __repr__(self):\n return \"O({})\".format(\n \", \".join(\n \"{}={}\".format(key, getattr(self, key))\n for key in dir(self)\n if not key.startswith(\"_\")\n )\n )\n" } ]
1
diegofau/domainextractor
https://github.com/diegofau/domainextractor
8847d33214598285fd3ad97506b9180e872e1e6b
ad641c11c7e01ebe8a61f2ecbdd0de798bf3ece6
1c1c647b23d90091d93a507203726494ced7ea28
refs/heads/master
2016-09-26T15:18:04.601853
2016-09-13T11:35:18
2016-09-13T11:35:18
68,102,546
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6363636255264282, "alphanum_fraction": 0.6661157011985779, "avg_line_length": 23.200000762939453, "blob_id": "e35a882dfc79429f6c7db9aee3bd4ea36627d7d1", "content_id": "88ea606752b215cfaacb3954e109a3decde67280", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 605, "license_type": "no_license", "max_line_length": 61, "num_lines": 25, "path": "/domainextractor.py", "repo_name": "diegofau/domainextractor", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport requests\nimport json\nimport time\nimport sys\nip = sys.argv[1]\nurl = \"https://api.cognitive.microsoft.com/bing/v5.0/search/\"\nh={\"Ocp-Apim-Subscription-Key\": \"BingAPI\"}\nc=50\no=0\nvalidador = True\nwhile validador:\n\tp={'q': \"ip:{0}\".format(ip), 'count':c, 'offset':o}\n\tresults = requests.get(url, params=p, headers=h)\n\tif results.status_code == 200:\n\t\tdatos=json.loads(results.text)\n\t\tfor i in datos['webPages']['value']:\n\t\t\tprint i['displayUrl'].split(\"/\")[0]\n\t\tc+=50\n\t\to+=50\n\t\ttime.sleep(6)\n\t\tif len(datos['webPages']['value']) != 50:\n\t\t\tvalidador=False\n\telse:\n\t\tvalidador=False\n" }, { "alpha_fraction": 0.8163265585899353, "alphanum_fraction": 0.8163265585899353, "avg_line_length": 23.5, "blob_id": "f42fe179367773b322240b2ba8b7e31b02a72177", "content_id": "1337aac7dc661502d069b9fd3b5abd4165d853d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 49, "license_type": "no_license", "max_line_length": 30, "num_lines": 2, "path": "/README.md", "repo_name": "diegofau/domainextractor", "src_encoding": "UTF-8", "text": "# domainextractor\nExtract the domain from one IP\n" } ]
2
GothenburgBitFactory/holidata
https://github.com/GothenburgBitFactory/holidata
02a2cc353b2b4efa8da261bbabe2be3fcdcec600
fdd02cd2cc1769236a10dfffab647375bc27f129
5200c7f1ddce92c58767d86431bb5c8400d84c5a
refs/heads/master
2023-08-17T05:28:20.586222
2023-04-24T20:27:30
2023-04-24T20:27:30
167,205,573
45
20
MIT
2019-01-23T15:27:32
2023-08-19T18:03:31
2023-09-11T05:54:40
Python
[ { "alpha_fraction": 0.3322019577026367, "alphanum_fraction": 0.3763258457183838, "avg_line_length": 21.245283126831055, "blob_id": "18ca69c8fc33a6e928a1fd0932157b4a7286c8a3", "content_id": "ef60739f7e3e790cdcfcd1c04658caa07bfb121a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2378, "license_type": "permissive", "max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Nowy Rok',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Trzech Króli',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Wielkanoc',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Poniedziałek Wielkanocny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Święto Pracy',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-03',\n 'description': 'Święto Konstytucji Trzeciego Maja',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Zielone Świątki',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Boże Ciało',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Wniebowzięcie Najświętszej Maryi Panny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Wszystkich Świętych',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Narodowe Święto Niepodległości',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Boże Narodzenie (pierwszy dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Boże Narodzenie (drugi dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.6002522110939026, "alphanum_fraction": 0.6620428562164307, "avg_line_length": 26.34482765197754, "blob_id": "541e60c681b6ddfe298d09b49d22a247e2c6351e", "content_id": "fff3eea772b0c5c615981fb65f9140c2a9c7d779", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 801, "license_type": "permissive", "max_line_length": 53, "num_lines": 29, "path": "/src/holidata/holidays/nb-NO.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass nb_NO(Locale):\n \"\"\"\n 01-01: [NF] Nyttårsdag\n 05-01: [NF] Offentlig Høytidsdag\n 05-08: [NF] Frigjøringsdag 1945\n 05-17: [NF] Grunnlovsdag\n 12-24: [NRF] Julaften\n 12-25: [NRF] Juledag\n 12-26: [NRF] Juledag\n 12-31: [NF] Nyttårsaften\n 49 days before Easter: [NRV] Fastelavn\n 7 days before Easter: [NRV] Palmesøndag\n 3 days before Easter: [NRV] Skjærtorsdag\n 2 days before Easter: [NRV] Langfredag\n Easter: [NRV] 
Påskedag\n 1 day after Easter: [NRV] Påskedag\n 39 days after Easter: [NRV] Kristi Himmelfartsdag\n 49 days after Easter: [NRV] Pinsedag\n 50 days after Easter: [NRV] Pinsedag\n \"\"\"\n\n locale = \"nb-NO\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "2cbb8fc431d179a5ff3c9655b52b2abfae55f014", "content_id": "88a3db9dda6ce0d218ed384d226038b6ffe04316", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-03-05',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Sexta-feira Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Natal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "33c85a095244b41059aa22c346e75d930178027e", "content_id": "3a5bba510ae26f380403991bee7843437e2425c6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 
'date': '2019-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-03-11',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-26',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-27',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-28',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-29',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-17',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5799999833106995, "alphanum_fraction": 0.5799999833106995, "avg_line_length": 15.666666984558105, "blob_id": "0812c443cdaa5d7c1d61e90b9a2b7969a301eb07", "content_id": "33cdf471eadd40cedd1c59f7d8732f775a70c6ea", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 100, "license_type": "permissive", "max_line_length": 34, "num_lines": 6, "path": "/src/holidata/holidays/BE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass BE(Country):\n id = \"BE\"\n languages = [\"de\", \"fr\", \"nl\"]\n" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "d186a493ed1b6116762fb00460b7de08fefce01d", "content_id": "645dfff38359e32368f50935b290faa4b266a0b4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 
'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-30',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "5a428336f001e9d1f730b3cf6d003c84228583dd", "content_id": "9b9fbf5c0fce44f49dca576109cf0b7593e57ce2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-09',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-20',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31093543767929077, "alphanum_fraction": 0.3601229786872864, "avg_line_length": 20.49056625366211, "blob_id": 
"931f350e0a301f61bf8d6f5354ce63b53a95f6d2", "content_id": "7bbdb40b2b235c242304e39d9c37125f90477f0f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2301, "license_type": "permissive", "max_line_length": 54, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Nagypéntek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3381712734699249, "alphanum_fraction": 0.3817126154899597, "avg_line_length": 21.598360061645508, "blob_id": "44ca79268d85b5d1b2e54e5c53064efb3253f3d8", "content_id": "f6376bc5ec5993fccf3d928beb4356f5f643f35c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2790, "license_type": "permissive", "max_line_length": 58, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Deň vzniku Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-06',\n 'description': 'Zjavenie Pána / Traja králi',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Veľký piatok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Veľkonočný pondelok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Sviatok práce',\n 'locale': 'sk-SK',\n 'notes': '',\n 
'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-08',\n 'description': 'Deň víťazstva nad fašizmom',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-07-05',\n 'description': 'Sviatok svätého Cyrila a Metoda',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-08-29',\n 'description': 'Výročie SNP',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-09-01',\n 'description': 'Deň Ústavy Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-09-15',\n 'description': 'Sedembolestná Panna Mária',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Sviatok všetkých svätých',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-17',\n 'description': 'Deň boja za slobodu a demokraciu',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Štedrý deň',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Prvý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Druhý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.34407365322113037, "alphanum_fraction": 0.3878020644187927, "avg_line_length": 21.877193450927734, "blob_id": "f1b01da073f8c27cc319c1a537d987f521b72e96", "content_id": "5f2d2591b45bc25b1642acd5ccc6b561868a4aea", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2650, "license_type": "permissive", "max_line_length": 88, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Nový rok',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-01',\n 'description': 'Den obnovy samostatného českého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Velký pátek',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Velikonoční pondělí',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Svátek práce',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-08',\n 'description': 'Den vítězství',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-07-05',\n 'description': 'Den slovanských věrozvěstů Cyrila a Metoděje',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-07-06',\n 'description': 'Den upálení mistra Jana Husa',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-09-28',\n 'description': 'Den české státnosti',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-10-28',\n 'description': 'Den vzniku samostatného československého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 
'NF'\n },\n {\n 'date': '2020-11-17',\n 'description': 'Den boje za svobodu a demokracii a Mezinárodní den studentstva',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-24',\n 'description': 'Štědrý den',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-25',\n 'description': '1. svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': '2. svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3136601746082306, "alphanum_fraction": 0.3593526780605316, "avg_line_length": 20.4489803314209, "blob_id": "1f6f5e8c164882453823ac284912ab8f040f8962", "content_id": "1244daa53bc895ea7181dca99feeadc4a7cf4efa", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2101, "license_type": "permissive", "max_line_length": 49, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Capodanno',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-06',\n 'description': 'Epifania',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Pasqua',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Pasquetta',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-25',\n 'description': 'Festa della liberazione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Festa del lavoro',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-06-02',\n 'description': 'Festa della repubblica',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Assunzione (ferragosto)',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Ognissanti',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-08',\n 'description': 'Immacolata concezione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Natale',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'S.to Stefano',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31116390228271484, "alphanum_fraction": 0.3605700731277466, "avg_line_length": 20.489795684814453, "blob_id": "01b32d918603ca94d6b427810249333ae82d61a9", "content_id": "c0786f11d12cbc94515d119a454c7faac11a1067", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2128, "license_type": "permissive", "max_line_length": 54, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 
'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-20',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31541725993156433, "alphanum_fraction": 0.36067891120910645, "avg_line_length": 20.439393997192383, "blob_id": "0fc42e7cf3e5cd38d51144f157ca901c5850b949", "content_id": "a1469ada4ad0e474534ceb308a11f272459daa91", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1414, "license_type": "permissive", "max_line_length": 48, "num_lines": 66, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Good Friday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-07',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-05-28',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-08-27',\n 'description': 'August Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.2920454442501068, "alphanum_fraction": 0.2920454442501068, "avg_line_length": 10.733333587646484, "blob_id": "7493ac5144953498d224e1860db6336cff7c84bb", "content_id": "3e8f3b953197650ed04b21e93409fa87d3669966", "detected_licenses": [ "MIT" ], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 880, "license_type": "permissive", "max_line_length": 37, "num_lines": 75, "path": "/src/holidata/holidays/__init__.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Locale, Country\n\n__all__ = [\n \"Country\",\n \"AT\",\n \"BE\",\n \"BR\",\n \"CA\",\n \"CA\",\n \"CH\",\n \"CO\",\n \"CZ\",\n \"DE\",\n \"DK\",\n \"EE\",\n \"ES\",\n \"FI\",\n \"FI\",\n \"FR\",\n \"GB\",\n \"GR\",\n \"HR\",\n \"HU\",\n \"IS\",\n \"IT\",\n \"NL\",\n \"NO\",\n \"NZ\",\n \"PL\",\n \"PT\",\n \"RU\",\n \"SE\",\n \"SI\",\n \"SK\",\n \"TR\",\n \"US\",\n \"ZA\",\n \"Locale\",\n \"cs-CZ\",\n \"da-DK\",\n \"de-AT\",\n \"de-BE\",\n \"de-CH\",\n \"de-DE\",\n \"el-GR\",\n \"en-CA\",\n \"en-GB\",\n \"en-NZ\",\n \"en-US\",\n \"en-ZA\",\n \"es-CO\",\n \"es-ES\",\n \"es-US\",\n \"et-EE\",\n \"fi-FI\",\n \"fr-BE\",\n \"fr-CA\",\n \"fr-FR\",\n \"hr-HR\",\n \"hu-HU\",\n \"is-IS\",\n \"it-IT\",\n \"nb-NO\",\n \"nl-BE\",\n \"nl-NL\",\n \"pl-PL\",\n \"pt-BR\",\n \"pt-PT\",\n \"ru-RU\",\n \"sk-SK\",\n \"sl-SI\",\n \"sv-FI\",\n \"sv-SE\",\n \"tr-TR\",\n]\n" }, { "alpha_fraction": 0.3136601746082306, "alphanum_fraction": 0.3593526780605316, "avg_line_length": 20.4489803314209, "blob_id": "e653ed6afa99f9022b74cbd2446351c6cd426093", "content_id": "b039c926271c23a543377889617b8338227f1b9f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2101, "license_type": "permissive", "max_line_length": 49, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Capodanno',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Epifania',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Pasqua',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Pasquetta',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-25',\n 'description': 'Festa della liberazione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Festa del lavoro',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-02',\n 'description': 'Festa della repubblica',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Assunzione (ferragosto)',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Ognissanti',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Immacolata concezione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Natale',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'S.to Stefano',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30265551805496216, "alphanum_fraction": 0.34921789169311523, "avg_line_length": 20.153846740722656, "blob_id": 
"9973cb2c6d0b533cb290da401b4063cd1e1c3b6e", "content_id": "0b72d820a4aad9acbfd911e54c5da832c4318f4f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2753, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Nationaldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-24',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-06-25',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-11-05',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "160a36236939fe5d503dab4a5918e9ffd8295322", "content_id": "2858d9d209208e099d204754a8f5dc77abe78f7b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 
'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3200301229953766, "alphanum_fraction": 0.3652108311653137, "avg_line_length": 20.778688430786133, "blob_id": "685418d964e39e2e5198c9b7f2dbb43789583c4a", "content_id": "486f641a59070c395e812911a9ad29102bccfe7d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2663, "license_type": "permissive", "max_line_length": 56, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-02',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-02-08',\n 'description': 'Prešernov dan',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 
'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "7620f80a4859c82c362c26cfb85a05d870928a20", "content_id": "ec1e04c8df59fb35872b75637021ca3a0f217b07", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Nelipühade 1. 
püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3381712734699249, "alphanum_fraction": 0.3817126154899597, "avg_line_length": 21.598360061645508, "blob_id": "c0e935ed4016cca77e011a63a7be7e195761e7b9", "content_id": "887b2139291e8c2dcff729503edb3683c29e043a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2790, "license_type": "permissive", "max_line_length": 58, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Deň vzniku Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-01-06',\n 'description': 'Zjavenie Pána / Traja králi',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Veľký piatok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Veľkonočný pondelok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Sviatok práce',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-08',\n 'description': 'Deň víťazstva nad fašizmom',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-07-05',\n 'description': 'Sviatok svätého Cyrila a Metoda',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-08-29',\n 'description': 'Výročie SNP',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-09-01',\n 'description': 'Deň Ústavy Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-09-15',\n 'description': 'Sedembolestná Panna Mária',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-01',\n 'description': 'Sviatok všetkých svätých',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-17',\n 'description': 'Deň boja za slobodu a demokraciu',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-24',\n 'description': 'Štedrý deň',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Prvý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': 
'2016-12-26',\n 'description': 'Druhý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32057416439056396, "alphanum_fraction": 0.3652312457561493, "avg_line_length": 21.008771896362305, "blob_id": "7e7a970ba73475b6f2f4db69bf9d3d967e97d70a", "content_id": "b03eadd44df7320a7268ef2b19b7afe6cb19683f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2524, "license_type": "permissive", "max_line_length": 58, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Año Neuvo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-18',\n 'description': 'Cumpleaños de Martin Luther King, Jr.',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-02-15',\n 'description': 'Día del Presidente',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-04-19',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2021-04-19',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2021-05-31',\n 'description': 'Día de los Caídos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-07-04',\n 'description': 'Día de la Independiencia',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-09-06',\n 'description': 'Día del Trabajo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-10-11',\n 'description': 'Día de Columbus',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-11-11',\n 'description': 'Día de los Veteranos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-11-25',\n 'description': 'Día de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-11-26',\n 'description': 'Día después de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Nochebuena',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Navidad',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31116390228271484, "alphanum_fraction": 0.3605700731277466, "avg_line_length": 20.489795684814453, "blob_id": "ed81a49fe81bbf5b33b494367394d5f4e402b9a2", "content_id": "9ad682ccf9f65ea7e0c9798344a4fa4ff286fadf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2128, "license_type": "permissive", "max_line_length": 54, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 
'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-13',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "6672e6118bf5f7fdd93ccab7a7da26fcf2a2371d", "content_id": "05d83c2d1df73fc153f58ee6dfd9949207ab05a3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-04-22',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 
'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.6234375238418579, "alphanum_fraction": 0.667187511920929, "avg_line_length": 25.66666603088379, "blob_id": "e64bf5c4841831142c86217c952c17d125119079", "content_id": "73ca7c9e2d8170f09e657b6d7e2b4f384f6ce347", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 644, "license_type": "permissive", "max_line_length": 53, "num_lines": 24, "path": "/src/holidata/holidays/da-DK.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass da_DK(Locale):\n \"\"\"\n 01-01: [NF] Nytårsdag\n 06-05: [NF] Grundlovsdag\n 12-25: [NRF] Juledag\n 12-26: [NRF] Anden juledag\n 3 days before Easter: [NRV] Skærtorsdag\n 2 days before Easter: [NRV] Langfredag\n Easter: [NRV] Påskedag\n 1 day after Easter: [NRV] Anden påskedag\n 26 days after Easter: [NRV] Store bededag\n 39 days after Easter: [NRV] Kristi himmelfartsdag\n 49 days after Easter: [NRV] Pinsedag\n 50 days after Easter: [NRV] Anden pinsedag\n \"\"\"\n\n locale = \"da-DK\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.30687829852104187, "alphanum_fraction": 0.35305434465408325, "avg_line_length": 20.224489212036133, "blob_id": "5f1858c8a9097502c9a9e6ffadacc79b5c322ef5", "content_id": "cf101007915a3f0cc99d334b546f18a4fa4136ce", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2086, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Nouvel An',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Ascension',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-07-21',\n 'description': 'Fête nationale',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-11',\n 'description': \"Jour de l'armistice\",\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Noël',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32405567169189453, 
"alphanum_fraction": 0.36858847737312317, "avg_line_length": 21.070175170898438, "blob_id": "ad059f219a08ff758d5c47783ae0c66f21657088", "content_id": "45598a9208de3356c302e5bc00ce7124dadc2b4b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2515, "license_type": "permissive", "max_line_length": 55, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-03-22',\n 'description': 'Human Rights Day (Supplement)',\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-27',\n 'description': 'Day of Goodwill (Supplement)',\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.42105263471603394, "alphanum_fraction": 0.42105263471603394, "avg_line_length": 32.77777862548828, "blob_id": "e9eb50ca88193ff5311ab4c07b4ea79b6e746e31", "content_id": "7ff56d9db34ce8d0574ee835e82e8f4e29f634be", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 304, "license_type": "permissive", "max_line_length": 116, "num_lines": 9, "path": "/src/holidata/holidays/BR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass BR(Country):\n id = \"BR\"\n languages = [\"pt\"]\n default_lang = \"pt\"\n regions = [\"AC\", \"AL\", \"AP\", \"AM\", \"BA\", \"CE\", \"DF\", \"ES\", \"GO\", \"MA\", \"MT\", \"MS\", \"MG\", \"PA\", \"PB\", \"PR\", \"PE\",\n \"PI\", \"RJ\", \"RN\", \"RS\", \"RO\", \"RR\", \"SC\", \"SP\", \"SE\", \"TO\"]\n" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, 
"blob_id": "86944743fdf2c51f8599d0dbf503bbad3da40a93", "content_id": "8c0284343c63cf1665390fbcae14ea4f91b7c4f4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/RU.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass RU(Country):\n id = \"RU\"\n languages = [\"ru\"]\n default_lang = \"ru\"\n" }, { "alpha_fraction": 0.31389227509498596, "alphanum_fraction": 0.35925474762916565, "avg_line_length": 20.66666603088379, "blob_id": "d3a45a786913a564810dda37db1a4377e9ea2d14", "content_id": "e93ed7cb382f81e195177de82757c15e4a9f22da", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2469, "license_type": "permissive", "max_line_length": 49, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-20',\n 'description': 'Birthday of Martin Luther King, Jr.',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-02-17',\n 'description': \"Washington's Birthday\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-04-21',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2014-04-21',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2014-05-26',\n 'description': 'Memorial Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-07-04',\n 'description': 'Independence Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-09-01',\n 'description': 'Labor Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-10-13',\n 'description': 'Columbus Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Veterans Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-11-27',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-11-28',\n 'description': 'Day after Thanksgiving',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-12-24',\n 'description': 'Christmas Eve',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "0e4ef42771475c049c7ec259842bef8911525966", "content_id": "41e9803de8868b9c21e0ed4f388bb65a12e9c132", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": 
"/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-09',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-20',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3136601746082306, "alphanum_fraction": 0.3593526780605316, "avg_line_length": 20.4489803314209, "blob_id": "d40fc17bfe30aa1b98344b6bd76d0f9ec249288c", "content_id": "c4c849f5f06d503c1b5c152827e0ce41dec0571d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2101, "license_type": "permissive", "max_line_length": 49, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Capodanno',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-06',\n 'description': 'Epifania',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Pasqua',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Pasquetta',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-25',\n 'description': 'Festa della liberazione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Festa del lavoro',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-06-02',\n 'description': 'Festa della repubblica',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-15',\n 'description': 'Assunzione (ferragosto)',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-01',\n 'description': 
'Ognissanti',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-08',\n 'description': 'Immacolata concezione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Natale',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'S.to Stefano',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30687829852104187, "alphanum_fraction": 0.35305434465408325, "avg_line_length": 20.224489212036133, "blob_id": "afc0d4ddc07e3b49217ce3290a4b0c7a57a20087", "content_id": "9ccb7bd84d0180716d01952ec9b4f5fa27ffb89e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2086, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Nouvel An',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Ascension',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-07-21',\n 'description': 'Fête nationale',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-11',\n 'description': \"Jour de l'armistice\",\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Noël',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.7355072498321533, "alphanum_fraction": 0.7355072498321533, "avg_line_length": 24.090909957885742, "blob_id": "1e3a10a57a0d1ed48139575df1d81da74b3713b5", "content_id": "d7d9028935f85013674141443fed4e105b0b1693", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": true, "language": "Markdown", "length_bytes": 552, "license_type": "permissive", "max_line_length": 71, "num_lines": 22, "path": "/.github/ISSUE_TEMPLATE/bug_report.md", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "---\nname: Bug report\nabout: Create a report to help us improve\ntitle: 'Error in locale LOCALE[, year YEAR]'\nlabels: 'bug'\nassignees: ''\n\n---\n\n**Describe the bug** \nA clear and concise description of what the bug is.\n* Which locale is affected?\n* Which holidays are 
affected?\n* Which years does the bug affect?\n\n(please change `LOCALE` and optionally `YEAR` in the title accordingly)\n\n**Expected behavior** \nA clear and concise description of what the correct holiday dates are.\n\n**Additional context** \nAdd any other context about the problem here.\n" }, { "alpha_fraction": 0.3098810017108917, "alphanum_fraction": 0.35540610551834106, "avg_line_length": 20.488889694213867, "blob_id": "5655d680bac30e0a29a53c3135c9de268b25b43a", "content_id": "563bc6c3b8a5e0f9dbe3f0d519b3043a05a40235", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1933, "license_type": "permissive", "max_line_length": 51, "num_lines": 90, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-02',\n 'description': \"New Year's Day (observed)\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-01-03',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-06-05',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-10-23',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30687829852104187, "alphanum_fraction": 0.35305434465408325, "avg_line_length": 20.224489212036133, "blob_id": "a1ca0b8ae04dea5e8ca351d0c562edd461a7bee0", "content_id": "df7c110e5a2d1c12546418dccc8feb9c9962ba4a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2086, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Nouvel An',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': 
'',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Ascension',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-07-21',\n 'description': 'Fête nationale',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-11',\n 'description': \"Jour de l'armistice\",\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Noël',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3165598213672638, "alphanum_fraction": 0.3620418608188629, "avg_line_length": 20.873016357421875, "blob_id": "6c1c9570242092cfe1a65cc879c2774dfa3c1b37", "content_id": "718f6d6b2ca14fdf1b9afc3336e6d94ae0c9e563", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8267, "license_type": "permissive", "max_line_length": 48, "num_lines": 378, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-02-20',\n 'description': 'Family Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2023-02-20',\n 'description': 'Family Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'ON',\n 'type': 'V'\n },\n {\n 'date': '2023-02-20',\n 'description': 'Family Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2023-02-20',\n 'description': 'Family Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NB',\n 'type': 'V'\n },\n {\n 'date': '2023-02-20',\n 'description': 'Louis Riel Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'MB',\n 'type': 'V'\n },\n {\n 'date': '2023-02-20',\n 'description': 'Islander Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'PE',\n 'type': 'V'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Good Friday',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Easter Monday',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'RV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Easter Monday',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'PE',\n 'type': 'RV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Easter Monday',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'RV'\n },\n {\n 'date': '2023-05-22',\n 'description': \"National Patriots' Day\",\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'V'\n },\n {\n 'date': '2023-05-22',\n 'description': 'Victoria Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2023-05-22',\n 'description': 'Victoria Day',\n 'locale': 
'en-CA',\n 'notes': '',\n 'region': 'BC',\n 'type': 'V'\n },\n {\n 'date': '2023-05-22',\n 'description': 'Victoria Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'MB',\n 'type': 'V'\n },\n {\n 'date': '2023-05-22',\n 'description': 'Victoria Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NS',\n 'type': 'V'\n },\n {\n 'date': '2023-05-22',\n 'description': 'Victoria Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'ON',\n 'type': 'V'\n },\n {\n 'date': '2023-05-22',\n 'description': 'Victoria Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2023-05-22',\n 'description': 'Victoria Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'V'\n },\n {\n 'date': '2023-05-22',\n 'description': 'Victoria Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NU',\n 'type': 'V'\n },\n {\n 'date': '2023-05-22',\n 'description': 'Victoria Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'YT',\n 'type': 'V'\n },\n {\n 'date': '2023-06-24',\n 'description': 'National Holiday',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'F'\n },\n {\n 'date': '2023-07-01',\n 'description': 'Canada Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-07',\n 'description': 'August Civic Holiday',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'V'\n },\n {\n 'date': '2023-08-07',\n 'description': 'August Civic Holiday',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NU',\n 'type': 'V'\n },\n {\n 'date': '2023-08-07',\n 'description': 'Saskatchewan Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2023-08-07',\n 'description': 'Heritage Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2023-08-07',\n 'description': 'Heritage Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NS',\n 'type': 'V'\n },\n {\n 'date': '2023-08-07',\n 'description': 'New Brunswick Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NB',\n 'type': 'V'\n },\n {\n 'date': '2023-09-04',\n 'description': 'Labour Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'BC',\n 'type': 'V'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'MB',\n 'type': 'V'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NL',\n 'type': 'V'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'ON',\n 'type': 'V'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'V'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'V'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NU',\n 'type': 'V'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Thanksgiving Day',\n 'locale': 
'en-CA',\n 'notes': '',\n 'region': 'YT',\n 'type': 'V'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Remembrance Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'F'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Remembrance Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'BC',\n 'type': 'F'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Remembrance Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NB',\n 'type': 'F'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Remembrance Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NL',\n 'type': 'F'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Remembrance Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'F'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "47b298ce6aabb0c404b8eb410d8e83ced8825f56", "content_id": "0f2c3de7281c4a61d11713cc4245cfe732bdccf2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "b9b6a57b2ebcb8df138cce7cee2b8a851d2fc837", "content_id": "7c6254971caaf4265cc60f151eb792ab28f86a86", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/PT.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass PT(Country):\n id = \"PT\"\n languages = [\"pt\"]\n default_lang = \"pt\"\n" }, { "alpha_fraction": 0.30687829852104187, "alphanum_fraction": 0.35305434465408325, "avg_line_length": 20.224489212036133, "blob_id": "5fa3334164af2b959d973db0b776df84b554a875", "content_id": "360ea757acfd501639dc14faa5fe2a2f73149d54", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2086, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Nouvel An',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-09',\n 'description': 'Ascension',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-20',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-07-21',\n 'description': 'Fête nationale',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-11',\n 'description': \"Jour de l'armistice\",\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Noël',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3381712734699249, "alphanum_fraction": 0.3817126154899597, "avg_line_length": 21.598360061645508, "blob_id": "dc28b5efac9d0ec2e902f3dc792ee526d101ddb5", "content_id": "29ddfef17a34512786767dccd78abe1706e4fd82", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2790, "license_type": "permissive", "max_line_length": 58, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Deň vzniku Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-06',\n 'description': 'Zjavenie Pána / Traja králi',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Veľký 
piatok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Veľkonočný pondelok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Sviatok práce',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-08',\n 'description': 'Deň víťazstva nad fašizmom',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-07-05',\n 'description': 'Sviatok svätého Cyrila a Metoda',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-08-29',\n 'description': 'Výročie SNP',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-09-01',\n 'description': 'Deň Ústavy Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-09-15',\n 'description': 'Sedembolestná Panna Mária',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-01',\n 'description': 'Sviatok všetkých svätých',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-17',\n 'description': 'Deň boja za slobodu a demokraciu',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-24',\n 'description': 'Štedrý deň',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Prvý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Druhý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "8ed003ca28d1b11159d6353540b43ca671e7d62b", "content_id": "dc89e8c61f73259226c352f9d535379e92b4b122", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/EE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass EE(Country):\n id = \"EE\"\n languages = [\"et\"]\n default_lang = \"et\"\n" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "fef513652e5cc4a882914ea681cf52544faab3af", "content_id": "c75f2b01dea4c8df53a26803c4058b89f2732c00", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 
'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-08',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "dd8bfc49e2fa6e221104d20814d15e46254d3ca1", "content_id": "e83bf1022dcd59205a2cd434ebf9da5642a9e3e6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-10',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-03-21',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-14',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-30',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-20',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-27',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-04',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 
'NF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-10-17',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-11-07',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-11-14',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "3dec79078787e1f666be7801809f278ddafe8e28", "content_id": "079d986cbf3647ac0c43ea27bccd3fb1fc33a116", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-18',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-17',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "b5144379c943744b9e1861dad73aafc44ca8d36f", "content_id": "dab329e79206353a063237d58822066db738f298", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": 
"/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-04-21',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-21',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-02',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-13',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-01',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3381712734699249, "alphanum_fraction": 0.3817126154899597, "avg_line_length": 21.598360061645508, "blob_id": "96f6a29dcfd731acc4cf66f305ac6dd7b147a69d", "content_id": "3cdfd138662102f4815fdc2bc471e0a41e8b8406", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2790, "license_type": "permissive", "max_line_length": 58, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Deň vzniku Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-06',\n 'description': 'Zjavenie Pána / Traja králi',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Veľký piatok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Veľkonočný 
pondelok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Sviatok práce',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-08',\n 'description': 'Deň víťazstva nad fašizmom',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-07-05',\n 'description': 'Sviatok svätého Cyrila a Metoda',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-08-29',\n 'description': 'Výročie SNP',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-09-01',\n 'description': 'Deň Ústavy Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-09-15',\n 'description': 'Sedembolestná Panna Mária',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Sviatok všetkých svätých',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-17',\n 'description': 'Deň boja za slobodu a demokraciu',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-24',\n 'description': 'Štedrý deň',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Prvý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Druhý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.6038338541984558, "alphanum_fraction": 0.6613418459892273, "avg_line_length": 25.08333396911621, "blob_id": "f36a5013ecc70169aaff222dbd7241a6e2b35748", "content_id": "ea4f4a72957d2318cf3b42e3389f725b25bc29bc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 626, "license_type": "permissive", "max_line_length": 58, "num_lines": 24, "path": "/src/holidata/holidays/nl-BE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass nl_BE(Locale):\n \"\"\"\n 01-01: [NF] Nieuwjaar\n 05-01: [NF] Dag van de arbeid\n 07-21: [NF] Nationale feestdag\n 08-15: [NRF] Onze Lieve Vrouw hemelvaart\n 11-01: [NRF] Allerheiligen\n 11-11: [NF] Wapenstilstand\n 12-25: [NRF] Kerstmis\n Easter: [NRV] Pasen\n 1 day after Easter: [NRV] Paasmaandag\n 39 days after Easter: [NRV] Onze Lieve Heer hemelvaart\n 49 days after Easter: [NRV] Pinksteren\n 50 days after Easter: [NRV] Pinkstermaandag\n \"\"\"\n\n locale = \"nl-BE\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "1bd2b5f7816be194eb8e5cd2f2390851fd93f729", "content_id": "4fc991b06d2a8c095ba97f70ee5cb8e4787941d6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 
'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "bd42c2c85487033568b12906fca2b78af04fc558", "content_id": "bb78c4d1461293a4efe45615b3e7a8696bf63c52", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Nelipühade 1. 
püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "bb91f9b680a4b3d414c3137bf643c5719aea02c6", "content_id": "6b5839a7dc1ad1f77aef2e91b09b5443e43f9d17", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3139752745628357, "alphanum_fraction": 0.36792805790901184, "avg_line_length": 20.88524627685547, "blob_id": "11bb47a3e5f08b852d46b6aedab27f1054d68812", "content_id": "a2764bd5e7fa46bc50c6babb19d64cfed3aa01da", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2695, "license_type": "permissive", "max_line_length": 54, "num_lines": 122, 
"path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-03-14',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2022-03-26 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Nagypéntek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-10-31',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2022-10-15 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3136601746082306, "alphanum_fraction": 0.3593526780605316, "avg_line_length": 20.4489803314209, "blob_id": "bdcbbc40208138c18acb15ff477f83b13ff55010", "content_id": "8d366b0ad6752e3e8709e26928c8d8656232b902", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2101, "license_type": "permissive", "max_line_length": 49, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Capodanno',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-01-06',\n 'description': 'Epifania',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Pasqua',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Pasquetta',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-25',\n 'description': 'Festa della liberazione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n 
{\n 'date': '2017-05-01',\n 'description': 'Festa del lavoro',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-02',\n 'description': 'Festa della repubblica',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-08-15',\n 'description': 'Assunzione (ferragosto)',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-11-01',\n 'description': 'Ognissanti',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-08',\n 'description': 'Immacolata concezione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Natale',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'S.to Stefano',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.4129386246204376, "alphanum_fraction": 0.45860040187835693, "avg_line_length": 30.758657455444336, "blob_id": "7bca12d4ac5d8183db03d81a0f71a3cce1dcbbf1", "content_id": "520410322901ff8dc816996b9a4128b0565b4831", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 30320, "license_type": "permissive", "max_line_length": 130, "num_lines": 953, "path": "/src/holidata/holidays/es-ES.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow, easter\nfrom .holidays import Locale, Holiday\n\n\"\"\"\nInformation taken from government websites around 2020-06\n https://administracion.gob.es/pag_Home/atencionCiudadana/calendarios/laboral.html\n http://www.seg-social.es/wps/portal/wss/internet/CalendarioLaboral\n\n 2011: https://www.boe.es/eli/es/res/2010/10/07/(1)\n https://www.boe.es/eli/es/res/2010/11/24/(1)\n 2012: https://www.boe.es/eli/es/res/2011/10/06/(1)\n 2013: https://www.boe.es/eli/es/res/2012/10/30/(1)\n https://www.boe.es/eli/es/res/2012/11/12/(2)\n 2014: https://www.boe.es/eli/es/res/2013/11/08/(3)\n https://www.boe.es/eli/es/res/2013/11/21/(1)\n 2015: https://www.boe.es/eli/es/res/2014/10/17/(3)\n 2016: https://www.boe.es/eli/es/res/2015/10/19/(1)\n 2017: https://www.boe.es/eli/es/res/2016/10/04/(1)\n 2018: https://www.boe.es/eli/es/res/2017/10/09/(1)\n https://www.boe.es/eli/es/res/2017/10/09/(1)/corrigendum/20171019\n https://www.boe.es/eli/es/res/2017/10/09/(1)/corrigendum/20171025\n 2019: https://www.boe.es/eli/es/res/2018/10/16/(1)\n 2020: https://www.boe.es/eli/es/res/2019/10/03/(1)\n 2021: https://www.boe.es/eli/es/res/2020/10/28/(1)\n 2022: https://www.boe.es/eli/es/res/2021/10/14/(3)\n 2023: https://www.boe.es/eli/es/res/2022/10/07/(2)\n\nRegional governments\n [AN] https://www.juntadeandalucia.es/temas/trabajar/relaciones/calendario.html\n\nAlso those sites for some information\n https://es.wikipedia.org/wiki/Calendario_laboral\n\"\"\"\n\n\nclass es_ES(Locale):\n \"\"\"\n 01-01: [NF] Año Nuevo\n 01-06: [NRF] Epifanía del Señor\n 05-01: [NF] Fiesta del Trabajo\n 08-15: [NRF] Asunción de la Virgen\n 10-12: [NF] Fiesta Nacional de España\n 11-01: [NRF] Todos los Santos\n 12-06: [NF] Día de la Constitución Española\n 12-08: [NRF] Inmaculada Concepción\n 12-25: [NRF] Natividad del Señor\n 2 days before Easter: [NRV] Viernes Santo\n Easter: [NRV] Pascua\n \"\"\"\n\n locale = \"es-ES\"\n easter_type = EASTER_WESTERN\n\n def 
holiday_lunes_siguiente_al_ano_nuevo(self):\n if self.year == 2012:\n regions = [\"\"]\n elif self.year == 2017:\n regions = [\"AN\", \"AR\", \"AS\", \"CL\", \"MC\", \"ML\"]\n elif self.year == 2023:\n regions = [\"AN\", \"AR\", \"AS\", \"CL\", \"MC\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 1, 1).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente al Año Nuevo\",\n \"NF\" if regions == [\"\"] else \"F\"\n ) for region in regions]\n\n def holiday_lunes_siguiente_a_la_epifania_del_senor(self):\n if self.year == 2013:\n regions = [\"AN\", \"AR\", \"AS\", \"CB\", \"CE\", \"CL\", \"CM\", \"CN\", \"EX\", \"MC\", \"MD\", \"ML\", \"NC\"]\n elif self.year == 2019:\n regions = [\"AN\", \"AR\", \"AS\", \"CE\", \"CL\", \"CN\", \"EX\", \"MC\", \"MD\", \"ML\", \"NC\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 1, 6).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente a la Epifanía del Señor\",\n \"NRF\" if regions == [\"\"] else \"RF\"\n ) for region in regions]\n\n def holiday_martes_de_carnaval(self):\n if self.year == 2023:\n return [Holiday(\n self.locale,\n \"EX\",\n SmartDayArrow(self.year, 2, 21),\n \"Martes de Carnaval\",\n \"V\"\n )]\n else:\n return []\n\n def holiday_dia_de_andalucia(self):\n if self.year == 2016:\n date = SmartDayArrow(self.year, 2, 29)\n elif self.year == 2021:\n date = SmartDayArrow(self.year, 3, 1)\n else:\n date = SmartDayArrow(self.year, 2, 28)\n\n return [Holiday(\n self.locale,\n \"AN\",\n date,\n \"Día de Andalucía\",\n \"F\"\n )]\n\n def holiday_dia_de_las_illes_balears(self):\n if self.year in [2011, 2012, 2013, 2014, 2016, 2017, 2018, 2019, 2021, 2022, 2023]:\n return [Holiday(\n self.locale,\n \"IB\",\n SmartDayArrow(self.year, 3, 1),\n \"Día de las Illes Balears\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_estatuto_de_autonomia_de_la_ciudad_de_melilla(self):\n if self.year in [2020, 2021]:\n return [Holiday(\n self.locale,\n \"ML\",\n SmartDayArrow(self.year, 3, 13),\n \"Estatuto de Autonomía de la Ciudad de Melilla\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_san_jose(self):\n if self.year == 2011:\n regions = [\"CM\", \"GA\", \"MC\", \"ML\", \"VC\"]\n elif self.year == 2012:\n regions = [\"CL\", \"MC\", \"MD\", \"ML\", \"NC\", \"RI\", \"VC\"]\n elif self.year == 2013:\n regions = [\"MC\", \"ML\", \"VC\"]\n elif self.year == 2014:\n regions = [\"MC\", \"ML\", \"NC\", \"VC\"]\n elif self.year == 2015:\n regions = [\"MC\", \"MD\", \"ML\", \"NC\", \"PV\", \"VC\"]\n elif self.year in [2016]:\n regions = [\"MC\", \"ML\", \"VC\"]\n elif self.year == 2018:\n regions = [\"MC\", \"VC\"]\n elif self.year == 2019:\n regions = [\"GA\", \"MC\", \"NC\", \"PV\", \"VC\"]\n elif self.year == 2020:\n regions = [\"CM\", \"GA\", \"MC\", \"NC\", \"PV\", \"VC\"]\n elif self.year == 2021:\n regions = [\"EX\", \"GA\", \"MC\", \"MD\", \"NC\", \"PV\", \"VC\"]\n elif self.year == 2022:\n regions = [\"VC\"]\n elif self.year == 2023:\n regions = [\"MD\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 3, 19),\n \"San José\",\n \"NRF\" if regions == [\"\"] else \"RF\"\n ) for region in regions]\n\n def holiday_lunes_de_fallas(self):\n if self.year == 2013:\n return [Holiday(\n self.locale,\n \"VC\",\n SmartDayArrow(self.year, 3, 18),\n \"Lunes de Fallas\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_traslado_de_san_jose(self):\n if self.year == 2013:\n return [Holiday(\n self.locale,\n \"MD\",\n 
SmartDayArrow(self.year, 3, 18),\n \"Traslado de San José\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_dia_siguente_a_san_jose(self):\n if self.year == 2015:\n return [Holiday(\n self.locale,\n \"GA\",\n SmartDayArrow(self.year, 3, 20),\n \"Día siguiente a San José\",\n \"RF\"\n )]\n else:\n return []\n\n def holiday_lunes_siguiente_a_san_jose(self):\n if self.year in [2017]:\n regions = [\"EX\", \"MD\"]\n elif self.year == 2023:\n regions = [\"MD\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 3, 19).shift_to_weekday(\"monday\", True),\n \"Lunes siguiente a San José\",\n \"NRF\" if regions == [\"\"] else \"RF\"\n ) for region in regions]\n\n def holiday_san_jorge__dia_de_aragon(self):\n if self.year == 2017:\n date = SmartDayArrow(self.year, 4, 24)\n else:\n date = SmartDayArrow(self.year, 4, 23)\n\n return [Holiday(\n self.locale,\n \"AR\",\n date,\n \"San Jorge / Día de Aragón\",\n \"RF\"\n )]\n\n def holiday_lunes_siguiente_a_san_jorge__dia_de_aragon(self):\n if self.year == 2023:\n date = SmartDayArrow(self.year, 4, 24)\n else:\n return []\n\n return [Holiday(\n self.locale,\n \"AR\",\n date,\n \"Lunes siguiente a San Jorge / Día de Aragón\",\n \"RF\"\n )]\n\n def holiday_fiesta_de_castilla_y_leon(self):\n if self.year == 2017:\n date = SmartDayArrow(self.year, 4, 24)\n else:\n date = SmartDayArrow(self.year, 4, 23)\n\n return [Holiday(\n self.locale,\n \"CL\",\n date,\n \"Fiesta de Castilla y León\",\n \"F\"\n )]\n\n def holiday_lunes_siguiente_a_la_fiesta_del_trabajo(self):\n if self.year == 2011:\n regions = [\"AN\", \"AR\", \"AS\", \"CB\", \"CE\", \"EX\", \"MC\", \"VC\"]\n elif self.year == 2016:\n regions = [\"AN\", \"AR\", \"AS\", \"CL\", \"CN\", \"EX\", \"MD\"]\n elif self.year == 2022:\n regions = [\"AN\", \"AR\", \"AS\", \"CL\", \"EX\", \"MC\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 5, 1).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente a la Fiesta del Trabajo\",\n \"NF\" if regions == [\"\"] else \"F\"\n ) for region in regions]\n\n def holiday_fiesta_de_la_comunidad_de_madrid(self):\n if self.year in [2016, 2021]:\n return []\n else:\n return [Holiday(\n self.locale,\n \"MD\",\n SmartDayArrow(self.year, 5, 2),\n \"Fiesta de la Comunidad de Madrid\",\n \"F\"\n )]\n\n def holiday_lunes_siguiente_al_dia_de_la_comunidad_de_madrid(self):\n if self.year in [2021]:\n return [Holiday(\n self.locale,\n \"MD\",\n SmartDayArrow(self.year, 5, 2).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente al Día de la Comunidad de Madrid\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_dia_de_las_letras_gallegas(self):\n if self.year in [2015, 2020]:\n return []\n else:\n return [Holiday(\n self.locale,\n \"GA\",\n SmartDayArrow(self.year, 5, 17),\n \"Día de las Letras Gallegas\",\n \"F\"\n )]\n\n def holiday_dia_de_canarias(self):\n if self.year in [2021]:\n return []\n else:\n return [Holiday(\n self.locale,\n \"CN\",\n SmartDayArrow(self.year, 5, 30),\n \"Día de Canarias\",\n \"F\"\n )]\n\n def holiday_dia_de_castilla_la_mancha(self):\n if self.year in [2014, 2015, 2020]:\n return []\n else:\n return [Holiday(\n self.locale,\n \"CM\",\n SmartDayArrow(self.year, 5, 31),\n \"Día de Castilla-La Mancha\",\n \"F\"\n )]\n\n def holiday_dia_de_la_region_de_murcia(self):\n if self.year == 2013:\n return []\n elif self.year == 2019:\n date = SmartDayArrow(self.year, 6, 10)\n else:\n date = SmartDayArrow(self.year, 6, 9)\n\n return [Holiday(\n self.locale,\n 
\"MC\",\n date,\n \"Día de la Región de Murcia\",\n \"F\"\n )]\n\n def holiday_dia_de_la_rioja(self):\n if self.year in [2013, 2019]:\n date = SmartDayArrow(self.year, 6, 10)\n else:\n date = SmartDayArrow(self.year, 6, 9)\n\n if date is not None:\n return [Holiday(\n self.locale,\n \"RI\",\n date,\n \"Día de La Rioja\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_san_juan(self):\n if self.year == 2011:\n regions = [\"CT\"]\n elif self.year == 2013:\n regions = [\"CT\", \"GA\"]\n elif self.year == 2014:\n regions = [\"CT\"]\n elif self.year == 2015:\n regions = [\"CT\"]\n elif self.year == 2016:\n regions = [\"CT\", \"GA\"]\n elif self.year == 2017:\n regions = [\"CT\"]\n elif self.year == 2019:\n regions = [\"CT\", \"VC\"]\n elif self.year == 2020:\n regions = [\"CT\", \"GA\", \"VC\"]\n elif self.year == 2021:\n regions = [\"CT\", \"VC\"]\n elif self.year == 2022:\n regions = [\"CT\", \"GA\", \"VC\"]\n elif self.year == 2023:\n regions = [\"CT\", \"VC\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 6, 24),\n \"San Juan\",\n \"NRF\" if regions == [\"\"] else \"RF\"\n ) for region in regions]\n\n def holiday_santiago_apostol__dia_nacional_de_galicia(self):\n if self.year == 2021:\n return []\n elif self.year == 2023:\n regions = [\"CL\", \"GA\", \"NC\", \"PV\"]\n else:\n regions = [\"GA\"]\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 7, 25),\n \"Santiago Apóstol / Día Nacional de Galicia\",\n \"RF\"\n ) for region in regions]\n\n def holiday_santiago_apostol(self):\n if self.year == 2011:\n regions = [\"CL\", \"MD\", \"NC\", \"PV\", \"RI\"]\n elif self.year == 2012:\n regions = [\"CB\"]\n elif self.year == 2013:\n regions = [\"CB\", \"NC\", \"PV\"]\n elif self.year == 2014:\n regions = [\"CB\"]\n elif self.year == 2015:\n regions = [\"NC\", \"PV\"]\n elif self.year == 2016:\n regions = [\"MD\", \"NC\", \"PV\", \"RI\"]\n elif self.year == 2017:\n regions = [\"NC\", \"PV\"]\n elif self.year == 2019:\n regions = [\"CB\", \"PV\"]\n elif self.year == 2020:\n regions = [\"PV\"]\n elif self.year == 2022:\n regions = [\"MD\", \"NC\", \"PV\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 7, 25),\n \"Santiago Apóstol\",\n \"NRF\" if regions == [\"\"] else \"RF\"\n ) for region in regions]\n\n def holiday_dia_de_las_instituciones_de_cantabria(self):\n if self.year in [2011, 2016, 2017, 2018, 2020, 2021, 2022, 2023]:\n return [Holiday(\n self.locale,\n \"CB\",\n SmartDayArrow(self.year, 7, 28),\n \"Día de las Instituciones de Cantabria\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_nuestra_senora_de_africa(self):\n if self.year in [2022, 2023]:\n return [Holiday(\n self.locale,\n \"CE\",\n SmartDayArrow(self.year, 8, 5),\n \"Nuestra Señora de África\",\n \"RF\"\n )]\n else:\n return []\n\n def holiday_lunes_siguiente_a_la_asuncion_de_la_virgen(self):\n if self.year == 2021:\n regions = [\"AN\", \"AR\", \"AS\", \"CL\", \"CN\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 8, 15).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente a la Asunción de la Virgen\",\n \"RF\"\n ) for region in regions]\n\n def holiday_dia_de_ceuta(self):\n if self.year in [2016, 2017, 2019, 2020, 2021, 2022, 2023]:\n return [Holiday(\n self.locale,\n \"CE\",\n SmartDayArrow(self.year, 9, 2),\n \"Día de Ceuta\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_v_centenario_vuelta_al_mundo(self):\n if self.year == 2022:\n return 
[Holiday(\n self.locale,\n \"PV\",\n SmartDayArrow(self.year, 9, 6),\n \"V Centenario Vuelta al Mundo\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_dia_de_asturias(self):\n if self.year in [2019]:\n return []\n else:\n return [Holiday(\n self.locale,\n \"AS\",\n SmartDayArrow(self.year, 9, 8),\n \"Día de Asturias\",\n \"F\"\n )]\n\n def holiday_lunes_siguiente_al_dia_de_asturias(self):\n if self.year in [2013, 2019]:\n return [Holiday(\n self.locale,\n \"AS\",\n SmartDayArrow(self.year, 9, 8).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente al Día de Asturias\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_dia_de_extremadura(self):\n if self.year in [2019]:\n return []\n else:\n return [Holiday(\n self.locale,\n \"EX\",\n SmartDayArrow(self.year, 9, 8),\n \"Día de Extremadura\",\n \"F\"\n )]\n\n def holiday_lunes_siguiente_al_dia_de_extremadura(self):\n if self.year in [2013, 2019]:\n return [Holiday(\n self.locale,\n \"EX\",\n SmartDayArrow(self.year, 9, 8).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente al Día de Extremadura\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_fiesta_nacional_de_cataluna(self):\n if self.year in [2011, 2016, 2022]:\n return []\n else:\n return [Holiday(\n self.locale,\n \"CT\",\n SmartDayArrow(self.year, 9, 11),\n \"Fiesta Nacional de Cataluña\",\n \"F\"\n )]\n\n def holiday_la_bien_aparecida(self):\n if self.year in [2011, 2012, 2014, 2014, 2015, 2016, 2017, 2018, 2020, 2021, 2022, 2023]:\n return [Holiday(\n self.locale,\n \"CB\",\n SmartDayArrow(self.year, 9, 15),\n \"La Bien Aparecida\",\n \"RF\")]\n else:\n return []\n\n def holiday_80_aniversario_del_primer_gobierno_vasco(self):\n if self.year in [2016]:\n return [Holiday(\n self.locale,\n \"PV\",\n SmartDayArrow(self.year, 10, 7),\n \"80º aniversario del primer Gobierno Vasco\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_dia_de_la_comunitat_valenciana(self):\n if self.year in [2011, 2016, 2022]:\n return []\n else:\n return [Holiday(\n self.locale,\n \"VC\",\n SmartDayArrow(self.year, 10, 9),\n \"Día de la Comunitat Valenciana\",\n \"F\"\n )]\n\n def holiday_lunes_siguiente_a_la_fiesta_nacional_de_espana(self):\n if self.year == 2014:\n regions = [\"AN\", \"AR\", \"AS\", \"CE\", \"CL\", \"EX\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 10, 12).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente a la Fiesta Nacional de España\",\n \"F\"\n ) for region in regions]\n\n def holiday_dia_del_pais_vasco_euskadiko_eguna(self):\n if self.year in [2011, 2012, 2013, 2014]:\n return [Holiday(\n self.locale,\n \"PV\",\n SmartDayArrow(self.year, 10, 25),\n \"Día del País Vasco-Euskadiko Eguna\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_lunes_siguiente_a_todos_los_santos(self):\n if self.year == 2015:\n regions = [\"AN\", \"AR\", \"AS\", \"CB\", \"CE\", \"CL\", \"CN\", \"EX\", \"GA\", \"IB\"]\n elif self.year == 2020:\n regions = [\"AN\", \"AR\", \"AS\", \"CL\", \"EX\", \"MD\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 11, 1).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente a Todos los Santos\",\n \"F\"\n ) for region in regions]\n\n def holiday_lunes_siguiente_al_dia_de_la_constitucion_espanola(self):\n if self.year == 2015:\n regions = [\"AN\", \"AR\", \"AS\", \"CE\", \"CL\", \"CM\", \"EX\", \"IB\", \"MC\", \"ML\", \"RI\", \"VC\"]\n elif self.year == 2020:\n regions = [\"AN\", \"AR\", \"AS\", \"CE\", \"CL\", \"CN\", 
\"EX\", \"IB\", \"MC\", \"MD\", \"ML\", \"NC\", \"RI\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 12, 6).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente al Día de la Constitución Española\",\n \"F\"\n ) for region in regions]\n\n def holiday_lunes_siguiente_a_la_inmaculada_concepcion(self):\n if self.year == 2013:\n regions = [\"AN\", \"AR\", \"AS\", \"CE\", \"CL\", \"EX\", \"MC\", \"RI\"]\n elif self.year == 2019:\n regions = [\"AN\", \"AR\", \"AS\", \"CB\", \"CL\", \"EX\", \"MD\", \"ML\", \"RI\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 12, 8).shift_to_weekday(\"monday\", including=True),\n \"Lunes siguiente a La Inmaculada Concepción\",\n \"RF\"\n ) for region in regions]\n\n def holiday_lunes_siguiente_a_la_natividad_del_senor(self):\n if self.year == 2022:\n regions = [\"AN\", \"AR\", \"AS\", \"CB\", \"CL\", \"CM\", \"CN\", \"EX\", \"IB\", \"MC\", \"MD\", \"ML\", \"NC\", \"RI\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 12, 26),\n \"Lunes siguiente a la Natividad del Señor\",\n \"RF\"\n ) for region in regions]\n\n def holiday_san_esteban(self):\n if self.year == 2011:\n regions = [\"AN\", \"AR\", \"AS\", \"CE\", \"CL\", \"CN\", \"CT\", \"EX\", \"IB\", \"ML\", \"NC\"]\n elif self.year == 2012:\n regions = [\"CT\"]\n elif self.year == 2013:\n regions = [\"CT\", \"IB\"]\n elif self.year == 2014:\n regions = [\"CT\", \"IB\"]\n elif self.year == 2015:\n regions = [\"CT\"]\n elif self.year == 2016:\n regions = [\"AN\", \"AR\", \"AS\", \"CB\", \"CE\", \"CL\", \"CM\", \"CT\", \"EX\", \"IB\", \"MC\", \"MD\", \"ML\", \"NC\", \"VC\"]\n elif self.year == 2017:\n regions = [\"CT\"]\n elif self.year == 2018:\n regions = [\"CL\"]\n elif self.year == 2019:\n regions = [\"CT\", \"IB\"]\n elif self.year == 2020:\n regions = [\"CT\", \"IB\"]\n elif self.year == 2022:\n regions = [\"CT\"]\n elif self.year == 2023:\n regions = [\"CT\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 12, 26),\n \"San Esteban\",\n \"NRF\" if regions == [\"\"] else \"RF\"\n ) for region in regions]\n\n def holiday_jueves_santo(self):\n if self.year in [2011, 2016, 2017, 2022]:\n regions = [\"AN\", \"AR\", \"AS\", \"CB\", \"CE\", \"CL\", \"CM\", \"CN\", \"EX\", \"GA\", \"IB\", \"MC\", \"MD\", \"ML\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year in [2012, 2013, 2014, 2015, 2019, 2020, 2021]:\n regions = [\"AN\", \"AR\", \"AS\", \"CB\", \"CE\", \"CL\", \"CM\", \"CN\", \"EX\", \"GA\", \"IB\", \"MC\", \"MD\", \"ML\", \"NC\", \"PV\", \"RI\"]\n elif self.year in [2018]:\n regions = [\"AN\", \"AR\", \"AS\", \"CE\", \"CL\", \"CM\", \"CN\", \"EX\", \"GA\", \"IB\", \"MC\", \"MD\", \"ML\", \"NC\", \"PV\", \"RI\"]\n elif self.year in [2023]:\n regions = [\"AN\", \"AR\", \"AS\", \"CB\", \"CE\", \"CL\", \"CM\", \"CN\", \"EX\", \"GA\", \"IB\", \"MC\", \"MD\", \"ML\", \"NC\", \"PV\", \"VC\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n easter(self.year, self.easter_type).shift(days=-3),\n \"Jueves Santo\",\n \"NRV\" if regions == [\"\"] else \"RV\"\n ) for region in regions]\n\n def holiday_lunes_de_pascua(self):\n if self.year == 2011:\n regions = [\"CT\", \"IB\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2012:\n regions = [\"CT\", \"IB\", \"NC\", \"VC\"]\n elif self.year == 2013:\n regions = [\"CB\", \"CT\", \"IB\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2014:\n regions = 
[\"CM\", \"CT\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2015:\n regions = [\"CB\", \"CM\", \"CT\", \"IB\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2016:\n regions = [\"CT\", \"IB\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2017:\n regions = [\"CT\", \"IB\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2018:\n regions = [\"CL\", \"IB\", \"NC\", \"PV\", \"VC\"]\n elif self.year == 2019:\n regions = [\"CB\", \"CM\", \"CT\", \"IB\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2020:\n regions = [\"CB\", \"CM\", \"CT\", \"IB\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2021:\n regions = [\"CT\", \"IB\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2022:\n regions = [\"CN\", \"CT\", \"IB\", \"NC\", \"PV\", \"RI\", \"VC\"]\n elif self.year == 2023:\n regions = [\"CT\", \"IB\", \"MC\", \"NC\", \"PV\", \"RI\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n easter(self.year, self.easter_type).shift_to_weekday(\"monday\", including=True),\n \"Lunes de Pascua\",\n \"NRV\" if regions == [\"\"] else \"RV\"\n ) for region in regions]\n\n def holiday_lunes_de_pascua_granada(self):\n if self.year == 2011:\n date = SmartDayArrow(self.year, 6, 13)\n elif self.year == 2016:\n date = SmartDayArrow(self.year, 5, 16)\n elif self.year == 2022:\n date = SmartDayArrow(self.year, 6, 6)\n else:\n return []\n\n return [Holiday(\n self.locale,\n \"CT\",\n date,\n \"Lunes de Pascua Granada\",\n \"F\"\n )]\n\n def holiday_corpus_christi(self):\n if self.year in [2012, 2013, 2016, 2017, 2019, 2020, 2021, 2022, 2023]:\n regions = [\"CM\"]\n elif self.year in [2011, 2014, 2015]:\n regions = [\"CM\", \"MD\"]\n else:\n return []\n\n return [Holiday(\n self.locale,\n region,\n easter(self.year, self.easter_type).shift(days=60),\n \"Corpus Christi\",\n \"RV\"\n ) for region in regions]\n\n def holiday_eid_fitr(self):\n if self.year == 2022:\n date = SmartDayArrow(self.year, 5, 3)\n elif self.year == 2023:\n date = SmartDayArrow(self.year, 4, 21)\n else:\n return []\n\n return [Holiday(\n self.locale,\n \"ML\",\n date,\n \"Fiesta del Eid Fitr\",\n \"RV\"\n )]\n\n def holiday_eidul_adha(self):\n if self.year == 2012:\n date = SmartDayArrow(self.year, 10, 27)\n elif self.year == 2013:\n date = SmartDayArrow(self.year, 10, 15)\n elif self.year == 2014:\n date = SmartDayArrow(self.year, 10, 6)\n elif self.year == 2015:\n date = SmartDayArrow(self.year, 9, 25)\n elif self.year == 2016:\n date = SmartDayArrow(self.year, 9, 12)\n elif self.year == 2017:\n date = SmartDayArrow(self.year, 9, 1)\n elif self.year == 2018:\n date = SmartDayArrow(self.year, 8, 22)\n elif self.year == 2019:\n date = SmartDayArrow(self.year, 8, 12)\n elif self.year == 2020:\n date = SmartDayArrow(self.year, 7, 31)\n elif self.year == 2021:\n date = SmartDayArrow(self.year, 7, 20)\n elif self.year == 2022:\n date = SmartDayArrow(self.year, 7, 9)\n elif self.year == 2023:\n date = SmartDayArrow(self.year, 6, 29)\n else:\n return []\n\n return [Holiday(\n self.locale,\n \"CE\",\n date,\n \"Fiesta del Sacrificio (Eidul Adha)\",\n \"RV\"\n )]\n\n def holiday_aid_al_adha(self):\n if self.year == 2022:\n date = SmartDayArrow(self.year, 7, 11)\n elif self.year == 2023:\n date = SmartDayArrow(self.year, 6, 29)\n else:\n return []\n\n return [Holiday(\n self.locale,\n \"ML\",\n date,\n \"Fiesta del Sacrificio (Aid Al Adha)\",\n \"RV\"\n )]\n\n def holiday_lunes_siguiente_al_eidul_adha(self):\n if self.year == 2011:\n date = SmartDayArrow(self.year, 11, 7)\n else:\n return []\n\n 
return [Holiday(\n self.locale,\n \"CE\",\n date,\n \"Lunes siguiente a la Fiesta del Sacrificio (Eidul Adha)\",\n \"RV\"\n )]\n\n def holiday_aid_el_kebir(self):\n if self.year == 2011:\n date = SmartDayArrow(self.year, 11, 7)\n elif self.year == 2012:\n date = SmartDayArrow(self.year, 10, 26)\n elif self.year == 2013:\n date = SmartDayArrow(self.year, 10, 15)\n elif self.year == 2014:\n date = SmartDayArrow(self.year, 10, 4)\n elif self.year == 2015:\n date = SmartDayArrow(self.year, 9, 25)\n elif self.year == 2016:\n date = SmartDayArrow(self.year, 9, 12)\n elif self.year == 2017:\n date = SmartDayArrow(self.year, 9, 1)\n elif self.year == 2018:\n date = SmartDayArrow(self.year, 8, 22)\n elif self.year == 2019:\n date = SmartDayArrow(self.year, 8, 12)\n elif self.year == 2020:\n date = SmartDayArrow(self.year, 7, 31)\n elif self.year == 2021:\n date = SmartDayArrow(self.year, 7, 21)\n else:\n return []\n\n return [Holiday(\n self.locale,\n \"ML\",\n date,\n \"Fiesta del Sacrificio (Aid El Kebir)\",\n \"RV\"\n )]\n" }, { "alpha_fraction": 0.32057416439056396, "alphanum_fraction": 0.3652312457561493, "avg_line_length": 21.008771896362305, "blob_id": "2fcd7afb73c671ee06917e7d88f7bf2e2671e483", "content_id": "fa74fd7eba13218b29dd278095d99ce869726cfd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2524, "license_type": "permissive", "max_line_length": 58, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Año Neuvo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-15',\n 'description': 'Cumpleaños de Martin Luther King, Jr.',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-02-19',\n 'description': 'Día del Presidente',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-04-16',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2018-04-16',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2018-05-28',\n 'description': 'Día de los Caídos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-07-04',\n 'description': 'Día de la Independiencia',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-09-03',\n 'description': 'Día del Trabajo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-10-08',\n 'description': 'Día de Columbus',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-11-11',\n 'description': 'Día de los Veteranos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-11-22',\n 'description': 'Día de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-11-23',\n 'description': 'Día después de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-12-24',\n 'description': 'Nochebuena',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Navidad',\n 
'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3094867765903473, "alphanum_fraction": 0.35510626435279846, "avg_line_length": 20.44444465637207, "blob_id": "59271774919da20850f9ef7406e05f37c1567c54", "content_id": "675c44dcc5e0714973dffde940aa4850e9bce024", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1929, "license_type": "permissive", "max_line_length": 50, "num_lines": 90, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-02',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-06-01',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-10-26',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-28',\n 'description': 'Boxing Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "3cc2cb42409959db37a6263f8bce023f22f793bd", "content_id": "1504e470056de151ced4b0b7efcd047878ea3d3f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/SE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass SE(Country):\n id = \"SE\"\n languages = [\"sv\"]\n default_lang = \"sv\"\n" }, { "alpha_fraction": 0.32270917296409607, "alphanum_fraction": 0.3673306703567505, "avg_line_length": 21.026315689086914, "blob_id": "b7b082a6208d82d0c0d9001d8cd3c91b98e9eb9e", "content_id": "32f2fb1ba1182b8a73ad7fe1e993207410e76c90", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2510, "license_type": "permissive", "max_line_length": 53, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 
'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-02',\n 'description': \"New Year's Day (Supplement)\",\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-09-25',\n 'description': 'Heritage Day (Supplement)',\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31302428245544434, "alphanum_fraction": 0.35894039273262024, "avg_line_length": 20.37735939025879, "blob_id": "c8a0018afdab47eec617b9b8df346ef09f4e56e5", "content_id": "d330a43b94bc22eed408ca5c26782fb5b0dd5b4d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2273, "license_type": "permissive", "max_line_length": 47, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-06',\n 'description': 'Trettondedagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Långfredagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Påskdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Annandag påsk',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Kristi himmelfärdsdag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Pingst',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 
'NRV'\n },\n {\n 'date': '2019-06-22',\n 'description': 'Midsommardagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-11-02',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-12-06',\n 'description': 'Självständighetsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32268083095550537, "alphanum_fraction": 0.367143452167511, "avg_line_length": 21.41025733947754, "blob_id": "27a7ec859b3d7b4fa8fe0bb99c8a8c8a9d885779", "content_id": "d01db2556c388f630ecb069e7ae05ea1382dac29", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12276, "license_type": "permissive", "max_line_length": 74, "num_lines": 546, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-03-01',\n 'description': 'Día de Andalucía',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2021-03-01',\n 'description': 'Día de las Illes Balears',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'F'\n },\n {\n 'date': '2021-03-13',\n 'description': 'Estatuto de Autonomía de la Ciudad de Melilla',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'F'\n },\n {\n 'date': '2021-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves 
Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Viernes Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-23',\n 'description': 'Fiesta de Castilla y León',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2021-04-23',\n 'description': 'San Jorge / Día de Aragón',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-03',\n 'description': 'Lunes siguiente al Día de la Comunidad de Madrid',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2021-05-17',\n 'description': 'Día de las Letras Gallegas',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'F'\n },\n {\n 
'date': '2021-05-31',\n 'description': 'Día de Castilla-La Mancha',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'F'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Corpus Christi',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-09',\n 'description': 'Día de la Región de Murcia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'F'\n },\n {\n 'date': '2021-06-09',\n 'description': 'Día de La Rioja',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'F'\n },\n {\n 'date': '2021-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n },\n {\n 'date': '2021-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n },\n {\n 'date': '2021-07-20',\n 'description': 'Fiesta del Sacrificio (Eidul Adha)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2021-07-21',\n 'description': 'Fiesta del Sacrificio (Aid El Kebir)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2021-07-28',\n 'description': 'Día de las Instituciones de Cantabria',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'F'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-08-16',\n 'description': 'Lunes siguiente a la Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-16',\n 'description': 'Lunes siguiente a la Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-16',\n 'description': 'Lunes siguiente a la Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-16',\n 'description': 'Lunes siguiente a la Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-16',\n 'description': 'Lunes siguiente a la Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RF'\n },\n {\n 'date': '2021-09-02',\n 'description': 'Día de Ceuta',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'F'\n },\n {\n 'date': '2021-09-08',\n 'description': 'Día de Asturias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2021-09-08',\n 'description': 'Día de Extremadura',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2021-09-11',\n 'description': 'Fiesta Nacional de Cataluña',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'F'\n },\n {\n 'date': '2021-09-15',\n 'description': 'La Bien Aparecida',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2021-10-09',\n 'description': 'Día de la Comunitat Valenciana',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'F'\n },\n {\n 'date': '2021-10-12',\n 'description': 'Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-06',\n 'description': 'Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n 
},\n {\n 'date': '2021-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Natividad del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5169667601585388, "alphanum_fraction": 0.5446676015853882, "avg_line_length": 28.1616153717041, "blob_id": "8910b296c28c20bd601716a1776605365a319dd0", "content_id": "bb3c13d0d559ad2c515edb3b43f9d2fffad67758", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2901, "license_type": "permissive", "max_line_length": 88, "num_lines": 99, "path": "/src/holidata/holidays/es-CO.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Holiday, Locale\n\n\"\"\"\nLEY 51 DE 1983\nhttp://www.suin-juriscol.gov.co/viewDocument.asp?id=1605519\n\"\"\"\n\nclass es_CO(Locale):\n \"\"\"\n 01-01: [NF] Año Nuevo\n 05-01: [NF] Día del Trabajo\n 07-20: [NF] Grito de Independencia\n 08-07: [NF] Batalla de Boyacá\n 12-08: [NRF] Inmaculada Concepción\n 12-25: [NRF] Navidad\n 3 days before Easter: [NRV] Jueves Santo\n 2 days before Easter: [NRV] Viernes Santo\n Easter: [NRV] Domingo de Pascua\n 43 days after Easter: [NRV] La Ascensión del Señor\n 64 days after Easter: [NRV] Corpus Christi\n 71 days after Easter: [NRV] El Sagrado Corazón de Jesús\n \"\"\"\n\n locale = \"es-CO\"\n easter_type = EASTER_WESTERN\n\n def holiday_reyes(self):\n \"\"\"First Monday after January 6.\"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 1, 6).shift_to_weekday(\"monday\", including=True),\n \"Día de los Reyes Magos\",\n \"NRV\"\n )]\n\n def holiday_san_jose(self):\n \"\"\"First Monday after March 19.\"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 3, 19).shift_to_weekday(\"monday\", including=True),\n \"Día de San José\",\n \"NRV\"\n )]\n\n def holiday_san_pedro_san_pablo(self):\n \"\"\"First Monday after June 29.\"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 6, 29).shift_to_weekday(\"monday\", including=True),\n \"San Pedro y San Pablo\",\n \"NRV\"\n )]\n\n def holiday_asuncion(self):\n \"\"\"First Monday after August 15.\"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 8, 15).shift_to_weekday(\"monday\", including=True),\n \"Asunción de la Virgen\",\n \"NRV\"\n )]\n\n def holiday_dia_raza(self):\n \"\"\"First Monday after October 12.\"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 10, 12).shift_to_weekday(\"monday\", including=True),\n \"Día de la Raza\",\n \"NV\"\n )]\n\n def holiday_todos_santos(self):\n \"\"\"First Monday after November 1.\"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 11, 1).shift_to_weekday(\"monday\", including=True),\n \"Todos los Santos\",\n \"NRV\"\n )]\n\n def holiday_independencia_cartagena(self):\n \"\"\"First Monday after November 11.\"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 11, 11).shift_to_weekday(\"monday\", including=True),\n \"Independencia de Cartagena\",\n \"NV\"\n )]\n\n" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "cbbd544205c3d958981eb09d19b95c19c399e5df", "content_id": 
"368312780c089526b4eb97c55cb0b4757846ac23", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/CO.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass CO(Country):\n id = \"CO\"\n languages = [\"es\"]\n default_lang = \"es\"\n" }, { "alpha_fraction": 0.32524430751800537, "alphanum_fraction": 0.36954396963119507, "avg_line_length": 21.492673873901367, "blob_id": "dfcfd5fcfe38f3a753261f57313f32399a405610", "content_id": "6a50a187877409ff3c0388756f713a8f4a9edc1d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12320, "license_type": "permissive", "max_line_length": 72, "num_lines": 546, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-02-28',\n 'description': 'Día de Andalucía',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2014-03-01',\n 'description': 'Día de las Illes Balears',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'F'\n },\n {\n 'date': '2014-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 
'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Viernes Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-23',\n 'description': 'Fiesta de Castilla y León',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2014-04-23',\n 'description': 'San Jorge / Día de Aragón',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-02',\n 'description': 'Fiesta de la Comunidad de Madrid',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2014-05-17',\n 'description': 'Día de las Letras Gallegas',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'F'\n },\n {\n 'date': '2014-05-30',\n 'description': 'Día de Canarias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'F'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Día de la Región de Murcia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'F'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Día de La Rioja',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'F'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Corpus Christi',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Corpus Christi',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 
'type': 'RF'\n },\n {\n 'date': '2014-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2014-07-25',\n 'description': 'Santiago Apóstol / Día Nacional de Galicia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-09-08',\n 'description': 'Día de Asturias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2014-09-08',\n 'description': 'Día de Extremadura',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2014-09-11',\n 'description': 'Fiesta Nacional de Cataluña',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'F'\n },\n {\n 'date': '2014-09-15',\n 'description': 'La Bien Aparecida',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2014-10-04',\n 'description': 'Fiesta del Sacrificio (Aid El Kebir)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2014-10-06',\n 'description': 'Fiesta del Sacrificio (Eidul Adha)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2014-10-09',\n 'description': 'Día de la Comunitat Valenciana',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'F'\n },\n {\n 'date': '2014-10-12',\n 'description': 'Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-10-13',\n 'description': 'Lunes siguiente a la Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2014-10-13',\n 'description': 'Lunes siguiente a la Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'F'\n },\n {\n 'date': '2014-10-13',\n 'description': 'Lunes siguiente a la Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2014-10-13',\n 'description': 'Lunes siguiente a la Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'F'\n },\n {\n 'date': '2014-10-13',\n 'description': 'Lunes siguiente a la Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2014-10-13',\n 'description': 'Lunes siguiente a la Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2014-10-25',\n 'description': 'Día del País Vasco-Euskadiko Eguna',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'F'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-06',\n 'description': 'Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Natividad del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 
'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.3200301229953766, "alphanum_fraction": 0.3652108311653137, "avg_line_length": 20.778688430786133, "blob_id": "faf7da4f672ec2c59ec1a93ee872acef137d5a38", "content_id": "d87052cbfe0af9c59b8741acbcc580625d0c1487", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2663, "license_type": "permissive", "max_line_length": 56, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-02',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-02-08',\n 'description': 'Prešernov dan',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31640625, "alphanum_fraction": 0.3615451455116272, "avg_line_length": 20.745283126831055, "blob_id": "ee586f61159b4dfb6fe264edc6d20b8aded9212a", "content_id": "2f022264f62171491a73526b50674c29efc7f4cf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2304, "license_type": "permissive", "max_line_length": 51, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': 
'2022-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-02',\n 'description': \"Worker's Day (Supplement)\",\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.34407365322113037, "alphanum_fraction": 0.3878020644187927, "avg_line_length": 21.877193450927734, "blob_id": "57d8d79da0091044a7ddd1bc8e5113bd5afc64d1", "content_id": "02aefe8391039f5b1b9c98183746e98a967d0e21", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2650, "license_type": "permissive", "max_line_length": 88, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Nový rok',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-01',\n 'description': 'Den obnovy samostatného českého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Velký pátek',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Velikonoční pondělí',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Svátek práce',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-08',\n 'description': 'Den vítězství',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-07-05',\n 'description': 'Den slovanských věrozvěstů Cyrila a Metoděje',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-07-06',\n 'description': 'Den upálení mistra Jana Husa',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-09-28',\n 'description': 'Den české státnosti',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n 
{\n 'date': '2023-10-28',\n 'description': 'Den vzniku samostatného československého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-11-17',\n 'description': 'Den boje za svobodu a demokracii a Mezinárodní den studentstva',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-24',\n 'description': 'Štědrý den',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': '1. svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': '2. svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "edbe1de0108b03fc77c50935c6ba5fdba320b34c", "content_id": "84a07fcfaa3ce821175a1b1bf5c983bca1f17ca6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5075176358222961, "alphanum_fraction": 0.5523166656494141, "avg_line_length": 31.919191360473633, "blob_id": "d603edc0d4353d146597519b6612108be43c7196", "content_id": "76af52dc0c6b8b02fa6d710cd62df81ed26511c9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3263, "license_type": "permissive", "max_line_length": 133, "num_lines": 99, "path": "/src/holidata/holidays/de-DE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# 
coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Locale, Holiday\n\n\nclass de_DE(Locale):\n \"\"\"\n 01-01: [NF] Neujahr\n 01-06: [BW,BY,ST] [RF] Heilige drei Könige\n 05-01: [NF] Erster Maifeiertag\n 08-15: [SL] [RF] Mariä Himmelfahrt\n 10-03: [NRF] Tag der Deutschen Einheit\n 11-01: [BW,BY,NW,RP,SL] [RF] Allerheiligen\n 12-24: [NRF] Heilig Abend\n 12-25: [NRF] Weihnachtstag\n 12-26: [NRF] Zweiter Weihnachtstag\n 12-31: [NF] Silvester\n 2 days before Easter: [NRV] Karfreitag\n Easter: [NRV] Ostern\n 1 day after Easter: [NRV] Ostermontag\n 39 days after Easter: [NRV] Christi Himmelfahrt\n 49 days after Easter: [NRV] Pfingstsonntag\n 50 days after Easter: [NRV] Pfingstmontag\n 60 days after Easter: [BW,BY,HE,NW,RP,SL] [RV] Fronleichnam\n \"\"\"\n\n locale = \"de-DE\"\n easter_type = EASTER_WESTERN\n\n def holiday_buss_und_bettag(self):\n \"\"\"11 days before 4. sunday before 12-25: [SN] [RV] Buß- und Bettag\"\"\"\n\n return [Holiday(\n self.locale,\n \"SN\",\n SmartDayArrow(self.year, 12, 25).shift_to_weekday(\"sunday\", order=4, reverse=True).shift(days=-11),\n \"Buß- und Bettag\",\n \"RV\"\n )]\n\n def holiday_reformationstag(self):\n \"\"\"\n before 2018: 10-31: [BB, MV, SN, ST, TH] [RF] Reformationstag\n since 2018: 10-31: [BB, BH, HH, MV, NI, SH, SN, ST, TH] [RF] Reformationstag\n 2017: 10-31: [NRF] Reformationstag (national holiday because of 500th anniversary)\n\n \"\"\"\n if self.year == 2017:\n regions = [\"\"]\n elif self.year < 2018:\n regions = [\"BB\", \"MV\", \"SN\", \"ST\", \"TH\"]\n else:\n regions = [\"BB\", \"BH\", \"HH\", \"MV\", \"NI\", \"SH\", \"SN\", \"ST\", \"TH\"]\n\n return [Holiday(\n self.locale,\n region,\n SmartDayArrow(self.year, 10, 31),\n \"Reformationstag\",\n \"NRF\" if regions == [\"\"] else \"RF\"\n ) for region in regions]\n\n def holiday_frauentag(self):\n \"\"\"\n 03-08: [BE] [F] Frauentag\n\n Introduced 2019 for Berlin\n http://gesetze.berlin.de/jportal/?quelle=jlink&query=FeiertG+BE+%C2%A7+1&psml=bsbeprod.psml&max=true\n \"\"\"\n if self.year >= 2019:\n return [Holiday(\n self.locale,\n \"BE\",\n SmartDayArrow(self.year, 3, 8),\n \"Internationaler Frauentag\",\n \"F\"\n )]\n else:\n return []\n\n def holiday_tag_der_befreiung(self):\n \"\"\"\n 2020-05-08: [BE] [F] 75. Jahrestag der Befreiung vom Nationalsozialismus und der Beendigung des Zweiten Weltkrieges in Europa\n\n Introduced 2019 for Berlin\n http://gesetze.berlin.de/jportal/?quelle=jlink&query=FeiertG+BE+%C2%A7+1&psml=bsbeprod.psml&max=true\n \"\"\"\n if self.year == 2020:\n return [Holiday(\n self.locale,\n \"BE\",\n SmartDayArrow(self.year, 5, 8),\n \"75. 
Jahrestag der Befreiung vom Nationalsozialismus und der Beendigung des Zweiten Weltkrieges in Europa\",\n \"F\"\n )]\n else:\n return []\n" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "cb50dd4a422b48d2a7762b43d0d69edf95353008", "content_id": "a02031e7f5196fc7deb2aa98b9e0183b8bc0e17f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-12',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-25',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "0c113c4c6b50b7739a1bc00ccc18cccac9655e6e", "content_id": "76e9361bd3936e935fe30b673a53af734f1586db", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/CZ.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass CZ(Country):\n id = \"CZ\"\n languages = [\"cs\"]\n default_lang = \"cs\"\n" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "641b3929fa328164a0c3da901e906cb2686ff82b", "content_id": "10a0d60b106c4ec0f707dfffc7e0f47a99e0dd9a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2022] 1.py", 
"repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-03-07',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-22',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-23',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-24',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-25',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-13',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31641605496406555, "alphanum_fraction": 0.36152881383895874, "avg_line_length": 20.58108139038086, "blob_id": "a02bf3afa16d36428149f95b64372405d308e927", "content_id": "0bed7d4eabc493da5f3b0285ff980af21af551d8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1596, "license_type": "permissive", "max_line_length": 48, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Good Friday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-04',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-08-31',\n 'description': 'August Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-12-25',\n 'description': 
'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-28',\n 'description': 'Boxing Day (observed)',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "069b85d17040608b67fe3f8fc9f5b7e60272d11d", "content_id": "e4496f57493c4416cd3954b50613ec01575b2494", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-09',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-03-19',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-06',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-21',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-06-11',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-06-18',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-07-02',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-20',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-10-15',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2012-11-05',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-11-12',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2012-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': 
'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30328208208084106, "alphanum_fraction": 0.35189032554626465, "avg_line_length": 20.305309295654297, "blob_id": "610a1a332cd209c44754fd29dd6eabfef3797c90", "content_id": "b2403f94c5ae5f6ad1054bf09b3b8c888ffe3020", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4817, "license_type": "permissive", "max_line_length": 49, "num_lines": 226, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'RF'\n },\n {\n 'date': '2022-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '6',\n 'type': 'RF'\n },\n {\n 'date': '2022-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '7',\n 'type': 'RF'\n },\n {\n 'date': '2022-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '8',\n 'type': 'RF'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Ostersonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Staatsfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-04',\n 'description': 'Florian',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '4',\n 'type': 'F'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-09-24',\n 'description': 'Rupert',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '5',\n 'type': 'F'\n },\n {\n 'date': '2022-10-10',\n 'description': 'Tag der Volksabstimmung',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'F'\n },\n {\n 'date': '2022-10-26',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-11',\n 'description': 'Martin',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '1',\n 'type': 'F'\n },\n {\n 'date': '2022-11-15',\n 'description': 
'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '9',\n 'type': 'F'\n },\n {\n 'date': '2022-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '3',\n 'type': 'F'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-24',\n 'description': 'Heiliger Abend',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Christtag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stefanitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-31',\n 'description': 'Silvester',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "168dd75a07cd5f68267f4d42ac9cd30d65c775d0", "content_id": "ebb35ad2d55569ec076600a4b220b20a3f04d8be", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-04-21',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-01',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 
'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31541725993156433, "alphanum_fraction": 0.36067891120910645, "avg_line_length": 20.439393997192383, "blob_id": "4179488a43126c83154c602a547573c0d1e111f8", "content_id": "1f82b48c604e624e388e92e14633d2914b0cf7a0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1414, "license_type": "permissive", "max_line_length": 48, "num_lines": 66, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Good Friday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-05',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-05-26',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-08-25',\n 'description': 'August Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.303668737411499, "alphanum_fraction": 0.35016345977783203, "avg_line_length": 20.184616088867188, "blob_id": "d67c505ea75cc93efe5358af70b7fb31746e6328", "content_id": "8e7189236c03696ab57cfb4b2aa0df716426a4d9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2757, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-02',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-06',\n 'description': 'Nationaldagen',\n 'locale': 
'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-24',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-06-25',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-11-05',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "657abdb4371635eb1ada17690d6fcec67a751cb6", "content_id": "d7b20b10ee879d0916e809fe37120bd7f190da79", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-02-23',\n 'description': 'День защитника Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-02',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31541725993156433, "alphanum_fraction": 0.36067891120910645, "avg_line_length": 20.439393997192383, "blob_id": "8cd78aaf569ee605e8223d9f380bd6276c91116c", "content_id": "633df89d1d843a448ca7f51b43730c2a7f027e00", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1414, "license_type": "permissive", "max_line_length": 48, "num_lines": 66, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2019] 1.py", "repo_name": 
"GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Good Friday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-06',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-05-27',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-08-26',\n 'description': 'August Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32693910598754883, "alphanum_fraction": 0.37322768568992615, "avg_line_length": 21.632076263427734, "blob_id": "17dd2ff29f31ad4fcb2ffe4b40531e18bfe83046", "content_id": "2515b21fd2d8456a7662f64e3a0af8f3882da101", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2427, "license_type": "permissive", "max_line_length": 65, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-30',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-08-31',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-09-01',\n 'description': 'Ramazan Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-11-06',\n 'description': 'Kurban Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-11-07',\n 'description': 'Kurban Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-11-08',\n 'description': 'Kurban Bayramı (3. 
Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-11-09',\n 'description': 'Kurban Bayramı (4. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "7b1bc3dcee823ed5e3090f38cf30a270edbee9ab", "content_id": "5dbcc940700ad0fc538d558dbeb9bb446ecb559f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': '2. pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-25',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-11-05',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3200301229953766, "alphanum_fraction": 0.3652108311653137, "avg_line_length": 20.778688430786133, "blob_id": "02e0aca58517e19cd3e3f5a8e004946a119b7872", "content_id": "074a50f8988037cd0b1aefc9d1fe2d930ce64b1b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2663, "license_type": "permissive", "max_line_length": 56, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-02',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-02-08',\n 'description': 'Prešernov dan',\n 
'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-13',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3381712734699249, "alphanum_fraction": 0.3817126154899597, "avg_line_length": 21.598360061645508, "blob_id": "93792128122ee5dd3f4fca28e2ccd59ea7f32df8", "content_id": "58d00757b342812ad64344d7643045d6e1f43160", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2790, "license_type": "permissive", "max_line_length": 58, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Deň vzniku Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-06',\n 'description': 'Zjavenie Pána / Traja králi',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-04-06',\n 'description': 'Veľký piatok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Veľkonočný pondelok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Sviatok práce',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-08',\n 'description': 'Deň víťazstva nad fašizmom',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-07-05',\n 'description': 'Sviatok svätého Cyrila a Metoda',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-08-29',\n 'description': 'Výročie SNP',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-09-01',\n 'description': 'Deň 
Ústavy Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-09-15',\n 'description': 'Sedembolestná Panna Mária',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Sviatok všetkých svätých',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-17',\n 'description': 'Deň boja za slobodu a demokraciu',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-24',\n 'description': 'Štedrý deň',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Prvý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'Druhý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5702479481697083, "alphanum_fraction": 0.6040570735931396, "avg_line_length": 27.319149017333984, "blob_id": "542b8dd6c6721012dd90db45efde3bd0fc6c9739", "content_id": "d6991f3f6afb2ddf35908155a020cb2e48d7b555", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1581, "license_type": "permissive", "max_line_length": 83, "num_lines": 47, "path": "/src/holidata/holidays/el-GR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_ORTHODOX\n\nfrom holidata.utils import SmartDayArrow, easter\nfrom .holidays import Holiday, Locale\n\n\nclass el_GR(Locale):\n \"\"\"\n 01-01: [NF] Πρωτοχρονιά\n 01-06: [NRF] Θεοφάνεια\n 03-25: [NF] Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας\n 08-15: [NRF] Κοίμηση της Θεοτόκου\n 10-28: [NF] Ημέρα του ΌΧΙ\n 12-25: [NRF] Χριστούγεννα\n 12-26: [NRF] Επόμενη ημέρα Χριστουγέννων\n 48 days before Easter: [NRV] Καθαρά Δευτέρα\n 2 days before Easter: [NRV] Μεγάλη Παρασκευή\n 1 day before Easter: [NRV] Μεγάλο Σάββατο\n Easter: [NRV] Πάσχα\n 1 day after Easter: [NRV] Δευτέρα του Πάσχα\n 50 days after Easter: [NRV] Δευτέρα του Αγίου Πνεύματος\n \"\"\"\n\n locale = \"el-GR\"\n easter_type = EASTER_ORTHODOX\n\n def holiday_may_day(self):\n \"\"\"\n 05-01: [NF] Πρωτομαγιά\n Postponed if it collides with Easter\n \"\"\"\n date = SmartDayArrow(self.year, 5, 1)\n easter_date = easter(self.year, self.easter_type)\n\n if date == easter_date:\n date = date.shift(days=2)\n elif date == easter_date.shift(days=1):\n date = date.shift(days=1)\n\n return [Holiday(\n self.locale,\n \"\",\n date,\n \"Πρωτομαγιά\",\n \"NF\"\n )]\n" }, { "alpha_fraction": 0.31769856810569763, "alphanum_fraction": 0.36272671818733215, "avg_line_length": 20.62162208557129, "blob_id": "cdbbece8f5233edb38db071a28d6db8b3dcdced7", "content_id": "96a7d7e194331a47be7a4fd3190784cec84483d1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1599, "license_type": "permissive", "max_line_length": 50, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Good Friday',\n 'locale': 
'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-05-30',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-08-29',\n 'description': 'August Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-27',\n 'description': 'Christmas Day (observed)',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.31518325209617615, "alphanum_fraction": 0.37102967500686646, "avg_line_length": 21.046154022216797, "blob_id": "7222bb94d141a83fe9f46485d5f9196bb21d9903", "content_id": "47cf90fa2cd7b1a9f47525c93c90c01b32655542", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2892, "license_type": "permissive", "max_line_length": 54, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Nagypéntek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-08-19',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2019-08-10 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-24',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2019-12-07 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': 
'',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-27',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2019-12-14 munkanap',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "b678bc21c6f143fb0db3e0aa48880b61faf58708", "content_id": "05d245684e8438828675b7cef1d461d819d925b1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-03-03',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-19',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "aedd46d05d352fd421b8e00d2abbf61161be5606", "content_id": "e5a540927fa3f41a950cb1820f03f8088f14237b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 
'date': '2019-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-04-18',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-25',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-05',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31302428245544434, "alphanum_fraction": 0.35894039273262024, "avg_line_length": 20.37735939025879, "blob_id": "d7fe0cad2ff427defbb0f694d0ea512e83eb3dd7", "content_id": "4c5b2c744c90973e4490246204cb626a6b769044", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2273, "license_type": "permissive", "max_line_length": 47, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Trettondedagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Långfredagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Påskdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Annandag påsk',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 
'description': 'Första maj',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Kristi himmelfärdsdag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pingst',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-25',\n 'description': 'Midsommardagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-11-05',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-12-06',\n 'description': 'Självständighetsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31541725993156433, "alphanum_fraction": 0.36067891120910645, "avg_line_length": 20.439393997192383, "blob_id": "2508887527330200d74db6951223b2cbbac916ef", "content_id": "b7728b217a65e9a4c148f0df829cb075d82d43fb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1414, "license_type": "permissive", "max_line_length": 48, "num_lines": 66, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Good Friday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-06',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-05-27',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-08-26',\n 'description': 'August Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "5c0920ed1487c82673b6bbb8448489afb5da0e6e", "content_id": "2c6efb13a98c9db80b6a5dfe61dfbc6a77efea3e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': 
'2015-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Nelipühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5618448853492737, "alphanum_fraction": 0.5946890115737915, "avg_line_length": 26.519229888916016, "blob_id": "ea8f8179a7cbe0598242763716f9aa4edd404000", "content_id": "cccecd9effec40e724dd5202226c00cdb77f8db5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1439, "license_type": "permissive", "max_line_length": 98, "num_lines": 52, "path": "/src/holidata/holidays/sv-FI.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Locale, Holiday\n\n\"\"\"\nsource: https://almanakka.helsinki.fi/en/flag-days-and-holidays-in-finland.html\n\"\"\"\n\n\nclass sv_FI(Locale):\n \"\"\"\n 01-01: [NF] Nyårsdagen\n 01-06: [NRF] Trettondedagen\n 05-01: [NF] Första maj\n 12-06: [NF] Självständighetsdagen\n 12-25: [NRF] Juldagen\n 12-26: [NRF] Annandag jul\n 2 days before Easter: [NRV] Långfredagen\n Easter: [NRV] Påskdagen\n 1 day after Easter: [NRV] Annandag påsk\n 39 days after Easter: [NRV] Kristi himmelfärdsdag\n 49 days after Easter: [NRV] Pingst\n \"\"\"\n\n locale = \"sv-FI\"\n easter_type = EASTER_WESTERN\n\n def holiday_midsommardagen(self):\n \"\"\"\n Saturday between 20 and 26 June: Midsommardagen\n \"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 6, 19).shift_to_weekday(\"saturday\", order=1, reverse=False),\n \"Midsommardagen\",\n \"NRV\"\n )]\n\n def holiday_alla_helgons_dag(self):\n \"\"\"\n Saturday between 31 October and 6 November: Alla helgons dag (All Saints' Day)\n \"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 10, 30).shift_to_weekday(\"saturday\", order=1, reverse=False),\n \"Alla helgons dag\",\n \"NRV\"\n )]\n" }, { "alpha_fraction": 0.3322019577026367, "alphanum_fraction": 0.3763258457183838, "avg_line_length": 21.245283126831055, "blob_id": "8bdc35a9af8f6a085e6173c0f9b5d2323dea16fc", "content_id": 
"595bb3f27baae226c7a23165ed2753aedcbfefd5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2378, "license_type": "permissive", "max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nowy Rok',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Trzech Króli',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Wielkanoc',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Poniedziałek Wielkanocny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Święto Pracy',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-03',\n 'description': 'Święto Konstytucji Trzeciego Maja',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Zielone Świątki',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Boże Ciało',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Wniebowzięcie Najświętszej Maryi Panny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Wszystkich Świętych',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-11',\n 'description': 'Narodowe Święto Niepodległości',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Boże Narodzenie (pierwszy dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Boże Narodzenie (drugi dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31389227509498596, "alphanum_fraction": 0.35925474762916565, "avg_line_length": 20.66666603088379, "blob_id": "ac15eaeb02f4267615317322053161644cf5d9f3", "content_id": "f14da5cabb4cd185e60241095e77d05b8a32cd8b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2469, "license_type": "permissive", "max_line_length": 49, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-19',\n 'description': 'Birthday of Martin Luther King, Jr.',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-02-16',\n 'description': \"Washington's Birthday\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-04-20',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2015-04-20',\n 'description': \"Patriots' Day\",\n 
'locale': 'en-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Memorial Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-07-04',\n 'description': 'Independence Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-09-07',\n 'description': 'Labor Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-10-12',\n 'description': 'Columbus Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-11-11',\n 'description': 'Veterans Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-11-26',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-11-27',\n 'description': 'Day after Thanksgiving',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Christmas Eve',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31836047768592834, "alphanum_fraction": 0.3632790446281433, "avg_line_length": 20.731706619262695, "blob_id": "8c825994acec88cb95d9995b13581653690f53f0", "content_id": "2ed9eafef783ad2e2e87d8af439f14b1bdac8e1b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1781, "license_type": "permissive", "max_line_length": 50, "num_lines": 82, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Good Friday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-03',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-05-31',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-08-30',\n 'description': 'August Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-27',\n 'description': 'Christmas Day (observed)',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-12-28',\n 'description': 'Boxing Day (observed)',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "043de2bd1032a888c79086de27b71e50a4b13564", "content_id": "c60ecbbf16dd902e54f30284276255289042ff55", 
"detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-20',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-25',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-08-07',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "68c5b5d9893ac49494387be38b8990a650ddd00c", "content_id": "a7435c4b3ccdda5dc73d3841a436536c6df3a643", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-02-23',\n 
'description': 'День защитника Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-24',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30328208208084106, "alphanum_fraction": 0.35189032554626465, "avg_line_length": 20.305309295654297, "blob_id": "375b83db5e3f214cc06d867a4f0d85572f7ee894", "content_id": "741ee629c11fcc6e067dbe98100a1b787023b86e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4817, "license_type": "permissive", "max_line_length": 49, "num_lines": 226, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '6',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '7',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '8',\n 'type': 'RF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Ostersonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Staatsfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-04',\n 'description': 'Florian',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '4',\n 'type': 'F'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n 
},\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-09-24',\n 'description': 'Rupert',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '5',\n 'type': 'F'\n },\n {\n 'date': '2021-10-10',\n 'description': 'Tag der Volksabstimmung',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'F'\n },\n {\n 'date': '2021-10-26',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-11',\n 'description': 'Martin',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '1',\n 'type': 'F'\n },\n {\n 'date': '2021-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '9',\n 'type': 'F'\n },\n {\n 'date': '2021-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '3',\n 'type': 'F'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Heiliger Abend',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Christtag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stefanitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-31',\n 'description': 'Silvester',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.45176470279693604, "alphanum_fraction": 0.4713725447654724, "avg_line_length": 31.075471878051758, "blob_id": "088d70a727068407255a5d77b4ecc283771a9439", "content_id": "7ab6693314513c6ab4f59ba8a10efd76be4cf935", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5102, "license_type": "permissive", "max_line_length": 74, "num_lines": 159, "path": "/src/holidata/holidays/en-GB.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow, month_reference\nfrom .holidays import Holiday, Locale\n\n\nclass en_GB(Locale):\n \"\"\"\n 01-01: [NF] New Year's Day\n 12-25: [NRF] Christmas Day\n 12-26: [NF] Boxing Day\n 2 days before Easter: [NRV] Good Friday\n 1 day after Easter: [NRV] Easter Monday\n 1. monday in may: [NV] Early May Bank Holiday\n 1. last monday in august: [NV] August Bank Holiday\n \"\"\"\n\n locale = \"en-GB\"\n easter_type = EASTER_WESTERN\n\n def holiday_new_years_day_observed(self):\n date = SmartDayArrow(self.year, 1, 1)\n\n if date.weekday() in [\"saturday\", \"sunday\"]:\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"monday\", including=True),\n description=\"New Year's Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n\n def holiday_spring_bank_holiday(self):\n \"\"\"\n 1. 
last monday in may: [NV] Spring Bank Holiday\n 2012: Moved to June 4, because of Queen’s Diamond Jubilee\n 2022: Moved to June 2, because of Queen's Platinum Jubilee\n \"\"\"\n if self.year == 2012:\n date = SmartDayArrow(self.year, 6, 4)\n elif self.year == 2022:\n date = SmartDayArrow(self.year, 6, 2)\n else:\n date = month_reference(self.year,\n \"may\",\n first=False) \\\n .shift_to_weekday(\"monday\",\n order=1,\n reverse=True,\n including=True)\n\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date,\n description=\"Spring Bank Holiday\",\n flags=\"NV\",\n notes=\"\")]\n\n def holiday_christmas_day_observed(self):\n date = SmartDayArrow(self.year, 12, 25)\n\n if date.weekday() == \"saturday\":\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"monday\", including=True),\n description=\"Christmas Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n elif date.weekday() == \"sunday\":\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"tuesday\", including=True),\n description=\"Christmas Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n\n def holiday_boxing_day_observed(self):\n date = SmartDayArrow(self.year, 12, 26)\n\n if date.weekday() == \"sunday\":\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"tuesday\", including=True),\n description=\"Boxing Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n elif date.weekday() == \"saturday\":\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"monday\", including=True),\n description=\"Boxing Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n\n def holiday_coronation_charles_iii(self):\n \"\"\"\n 2023-05-08: Bank holiday for the coronation of King Charles III\n \"\"\"\n if self.year == 2023:\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 5, 8),\n description=\"Coronation of King Charles III\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n\n def holiday_royal_jubilees(self):\n \"\"\"\n 2012-06-05: Queen's Diamond Jubilee\n 2022-06-03: Queen's Platinum Jubilee\n \"\"\"\n if self.year == 2012:\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 6, 5),\n description=\"Queen's Diamond Jubilee\",\n flags=\"NV\",\n notes=\"\")]\n\n if self.year == 2022:\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 6, 3),\n description=\"Queen's Platinum Jubilee\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n\n def holiday_state_funeral_of_queen_elizabeth_ii(self):\n if self.year == 2022:\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 9, 19),\n description=\"State Funeral of Queen Elizabeth II\",\n flags=\"NF\",\n notes=\"\")]\n\n return []\n" }, { "alpha_fraction": 0.3079564869403839, "alphanum_fraction": 0.35374927520751953, "avg_line_length": 20.317073822021484, "blob_id": "1c86e6da0b45e7d921b403282d0fa204539615cb", "content_id": "84ce01cf230bf2efded1bb77e0d0eda783fc9487", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1747, "license_type": "permissive", "max_line_length": 50, "num_lines": 82, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': \"New Year's 
Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-02',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-06-03',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-10-28',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "3627da9105bca3af805dc27bce4898ac2064abd7", "content_id": "7cf2221139352216e02363d3258fc88deca9aaf5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-01-09',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-03-20',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-29',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-19',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-26',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-07-03',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 
'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-08-21',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-10-16',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-11-06',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-11-13',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "24a873ab828f94fe499c91544956b0117215b1d2", "content_id": "ab755834b3d7337e5f1cb10425f1ebaf619b3a30", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-14',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-13',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "8395d48b31cdd652ea17b5914636cdfc32c7a622", "content_id": "dc49f84a894ffb8a2ca48834fe69554bf84c6af6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/NZ.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass NZ(Country):\n id = \"NZ\"\n languages = [\"en\"]\n default_lang = \"en\"\n" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "032b94e371a5a71555ba5adbb7ca0a40fff6b60f", "content_id": "4f0a82ca7234db750a6564a725fdbc69ccedacd9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-03-14',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-04-29',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-04-30',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-03',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-06-20',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "00369b46fb2be661526f0401aeaa01c6e0081ffa", "content_id": "935c2028f0d2d7d6dde2766aca50b39b30004c3e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": 
"[\n {\n 'date': '2023-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': '2. pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-24',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-11-04',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "99d6b91e4dfc06bb4814cf168c21681078313f49", "content_id": "408af2414802d0e527003e9ff3be6044fcce43ee", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': '2. 
pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-10',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-20',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-06-23',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-11-03',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.6090909242630005, "alphanum_fraction": 0.6090909242630005, "avg_line_length": 14.714285850524902, "blob_id": "62cd73e6430e6958e8bcd7fcb1b77bf562db1ecb", "content_id": "0aef8e987da20ee85348358866b994e64dba1852", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 110, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/NO.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass NO(Country):\n id = \"NO\"\n languages = \"nb\"\n default_lang = \"nb\"\n" }, { "alpha_fraction": 0.30265551805496216, "alphanum_fraction": 0.34921789169311523, "avg_line_length": 20.153846740722656, "blob_id": "53aff4ef2e03bed1cd7220d42c1ffc19e8c4fc1c", "content_id": "c8abf8153f225d08e8081f389b229dd02b274421", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2753, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-06-06',\n 
'description': 'Nationaldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-06-24',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-06-25',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-11-05',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "871aea772d31b2fa940d80473b1973ef9a40d5aa", "content_id": "58d01bc2d3115b801a949137c20ccaef59c4d5f8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.4281429052352905, "alphanum_fraction": 0.5440414547920227, "avg_line_length": 32.03603744506836, "blob_id": "930cf0e37c87f87610218a5bef00bc0207335c98", "content_id": "602bcd3f766162d3bd2f2344fb25405fd082ffbf", "detected_licenses": [ "MIT" ], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3696, "license_type": "permissive", "max_line_length": 126, "num_lines": 111, "path": "/src/holidata/holidays/tr-TR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Locale, Holiday\n\n\"\"\"\nsources:\nlaw on national holidays and general holidays: https://www.mevzuat.gov.tr/mevzuat?MevzuatNo=2429&MevzuatTur=1&MevzuatTertip=5 \ndates for holidays 'Ramazan Bayramı' and 'Kurban Bayramı': https://vakithesaplama.diyanet.gov.tr/dini_gunler.php\n\"\"\"\n\n\nclass tr_TR(Locale):\n \"\"\"\n 01-01: [NF] Yılbaşı\n 04-23: [NF] Ulusal Egemenlik ve Çocuk Bayramı\n 05-01: [NF] Emek ve Dayanışma Günü\n 05-19: [NF] Atatürk'ü Anma, Gençlik ve Spor Bayramı\n 08-30: [NF] Zafer Bayramı\n 10-29: [NF] Cumhuriyet Bayramı\n \"\"\"\n\n locale = \"tr-TR\"\n\n @staticmethod\n def __ramazan_bayrami_reference(year):\n ramazan_bayrami_reference = {\n 2011: SmartDayArrow(2011, 8, 29),\n 2012: SmartDayArrow(2012, 8, 18),\n 2013: SmartDayArrow(2013, 8, 7),\n 2014: SmartDayArrow(2014, 7, 27),\n 2015: SmartDayArrow(2015, 7, 16),\n 2016: SmartDayArrow(2016, 7, 4),\n 2017: SmartDayArrow(2017, 6, 24),\n 2018: SmartDayArrow(2018, 6, 14),\n 2019: SmartDayArrow(2019, 6, 4),\n 2020: SmartDayArrow(2020, 5, 23),\n 2021: SmartDayArrow(2021, 5, 12),\n 2022: SmartDayArrow(2022, 5, 1),\n 2023: SmartDayArrow(2023, 4, 20),\n 2024: SmartDayArrow(2024, 4, 9),\n 2025: SmartDayArrow(2025, 3, 29),\n 2026: SmartDayArrow(2026, 3, 19),\n 2027: SmartDayArrow(2027, 3, 8),\n }\n\n return ramazan_bayrami_reference[year]\n\n @staticmethod\n def __kurban_bayrami_reference(year):\n kurban_bayrami_reference = {\n 2011: SmartDayArrow(2011, 11, 5),\n 2012: SmartDayArrow(2012, 10, 24),\n 2013: SmartDayArrow(2013, 10, 14),\n 2014: SmartDayArrow(2014, 10, 3),\n 2015: SmartDayArrow(2015, 9, 23),\n 2016: SmartDayArrow(2016, 9, 11),\n 2017: SmartDayArrow(2017, 8, 31),\n 2018: SmartDayArrow(2018, 8, 20),\n 2019: SmartDayArrow(2019, 8, 10),\n 2020: SmartDayArrow(2020, 7, 30),\n 2021: SmartDayArrow(2021, 7, 19),\n 2022: SmartDayArrow(2022, 7, 8),\n 2023: SmartDayArrow(2023, 6, 27),\n 2024: SmartDayArrow(2024, 6, 15),\n 2025: SmartDayArrow(2025, 6, 5),\n 2026: SmartDayArrow(2026, 5, 26),\n 2027: SmartDayArrow(2027, 5, 15),\n }\n\n return kurban_bayrami_reference[year]\n\n def holiday_demokrasi_ve_milli_birlik_gunu(self):\n \"\"\"\n Democracy and National Unity Day (since 2017)\n 07-15 [NF] Demokrasi ve Milli Birlik Günü\n \"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 7, 15),\n \"Demokrasi ve Milli Birlik Günü\",\n \"NF\"\n )] if self.year >= 2017 else []\n\n def holiday_ramazan_bayrami(self):\n \"\"\"\n Ramazan Bayramı 1.-3. Gün\n \"\"\"\n reference = self.__ramazan_bayrami_reference(self.year)\n\n return [Holiday(\n self.locale,\n \"\",\n reference.shift(days=i),\n \"Ramazan Bayramı ({}. Gün)\".format(i),\n \"NRV\"\n ) for i in [1, 2, 3]]\n\n def holiday_kurban_bayrami(self):\n \"\"\"\n Kurban Bayramı 1.-4. Gün\n \"\"\"\n reference = self.__kurban_bayrami_reference(self.year)\n\n return [Holiday(\n self.locale,\n \"\",\n reference.shift(days=i),\n \"Kurban Bayramı ({}. 
Gün)\".format(i),\n \"NRV\"\n ) for i in [1, 2, 3, 4]]\n" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "574f9b49603b6bb65e32babda8138fd0e60ade7e", "content_id": "5dac272e4b7c0377f969e83f02b3a8e9b93fc98b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-02-23',\n 'description': 'День защитника Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5651568174362183, "alphanum_fraction": 0.598606288433075, "avg_line_length": 26.596153259277344, "blob_id": "5c250c877cd9d99debb07f1e0bf2a2e4b6f1c0fc", "content_id": "c56f8c82910f9bb503647a47bb47c5e335d155b4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1467, "license_type": "permissive", "max_line_length": 98, "num_lines": 52, "path": "/src/holidata/holidays/fi-FI.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Locale, Holiday\n\n\"\"\"\nsource: https://almanakka.helsinki.fi/en/flag-days-and-holidays-in-finland.html\n\"\"\"\n\n\nclass fi_FI(Locale):\n \"\"\"\n 01-01: [NF] Uudenvuodenpäivä\n 01-06: [NRF] Loppiainen\n 05-01: [NF] Vappu\n 12-06: [NF] Itsenäisyyspäivä\n 12-25: [NRF] Joulupäivä\n 12-26: [NRF] Tapaninpäivä\n 2 days before Easter: [NRV] Pitkäperjantai\n Easter: [NRV] Pääsiäispäivä\n 1 day after Easter: [NRV] 2. 
pääsiäispäivä\n 39 days after Easter: [NRV] Helatorstai\n 49 days after Easter: [NRV] Helluntaipäivä\n \"\"\"\n\n locale = \"fi-FI\"\n easter_type = EASTER_WESTERN\n\n def holiday_juhannuspaeivae(self):\n \"\"\"\n Saturday between 20 and 26 June: Juhannuspäivä (Midsummer Day)\n \"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 6, 19).shift_to_weekday(\"saturday\", order=1, reverse=False),\n \"Juhannuspäivä\",\n \"NRV\"\n )]\n\n def holiday_pyhaeinpaeivae(self):\n \"\"\"\n Saturday between 31 October and 6 November: Pyhäinpäivä (All Saints' Day)\n \"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 10, 30).shift_to_weekday(\"saturday\", order=1, reverse=False),\n \"Pyhäinpäivä\",\n \"NRV\"\n )]\n" }, { "alpha_fraction": 0.31723451614379883, "alphanum_fraction": 0.36217832565307617, "avg_line_length": 21.16265106201172, "blob_id": "93dad83f349d332bee087ae7a92a3571194118f2", "content_id": "3d8f83e69e75fdf9b5b9607a1a84ab5d764ecaf2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11075, "license_type": "permissive", "max_line_length": 68, "num_lines": 498, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-02',\n 'description': 'Lunes siguiente al Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-06',\n 'description': 'Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-02-28',\n 'description': 'Día de Andalucía',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2012-03-01',\n 'description': 'Día de las Illes Balears',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'F'\n },\n {\n 'date': '2012-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RV'\n },\n {\n 
'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-06',\n 'description': 'Viernes Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2012-04-23',\n 'description': 'Fiesta de Castilla y León',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2012-04-23',\n 'description': 'San Jorge / Día de Aragón',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-02',\n 'description': 'Fiesta de la Comunidad de Madrid',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Día de las Letras Gallegas',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'F'\n },\n {\n 'date': '2012-05-30',\n 'description': 'Día de Canarias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'F'\n },\n {\n 'date': '2012-05-31',\n 'description': 'Día de Castilla-La Mancha',\n 'locale': 'es-ES',\n 'notes': '',\n 
'region': 'CM',\n 'type': 'F'\n },\n {\n 'date': '2012-06-07',\n 'description': 'Corpus Christi',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2012-06-09',\n 'description': 'Día de la Región de Murcia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'F'\n },\n {\n 'date': '2012-06-09',\n 'description': 'Día de La Rioja',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'F'\n },\n {\n 'date': '2012-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2012-07-25',\n 'description': 'Santiago Apóstol / Día Nacional de Galicia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-09-08',\n 'description': 'Día de Asturias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2012-09-08',\n 'description': 'Día de Extremadura',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2012-09-11',\n 'description': 'Fiesta Nacional de Cataluña',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'F'\n },\n {\n 'date': '2012-09-15',\n 'description': 'La Bien Aparecida',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2012-10-09',\n 'description': 'Día de la Comunitat Valenciana',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'F'\n },\n {\n 'date': '2012-10-12',\n 'description': 'Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-10-25',\n 'description': 'Día del País Vasco-Euskadiko Eguna',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'F'\n },\n {\n 'date': '2012-10-26',\n 'description': 'Fiesta del Sacrificio (Aid El Kebir)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2012-10-27',\n 'description': 'Fiesta del Sacrificio (Eidul Adha)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-06',\n 'description': 'Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Natividad del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.5104275345802307, "alphanum_fraction": 0.5208550691604614, "avg_line_length": 21.83333396911621, "blob_id": "4d80530eae1ad807cdcdb803df216747cdb6f0ee", "content_id": "7321a6ec43eba8068a6534dcb4805fb85eb6480b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1918, "license_type": "permissive", "max_line_length": 77, "num_lines": 84, "path": "/src/holidata/utils.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "\"\"\"\nProvides date-handling related utils.\n\"\"\"\nimport 
dateutil\nfrom arrow import Arrow\n\n\nclass SmartDayArrow(Arrow):\n \"\"\"\n A wrapper around Arrow datetime reference that provides additional\n convenience methods.\n \"\"\"\n\n def weekday(self):\n \"\"\"\n Provide a more readable weekday representation.\n \"\"\"\n\n weekdays = [\n \"monday\",\n \"tuesday\",\n \"wednesday\",\n \"thursday\",\n \"friday\",\n \"saturday\",\n \"sunday\",\n ]\n\n return weekdays[Arrow.weekday(self)]\n\n def shift_to_weekday(self, day, order=1, reverse=False, including=False):\n \"\"\"\n Shifts to {order}. weekday in the given direction, i.e.\n 2. monday before this date would be:\n\n >>> arrow.shift_to_weekday(\"monday\", order=2, reverse=True)\n \"\"\"\n\n result = self\n\n if including and result.weekday() == day:\n if order == 1:\n return result\n else:\n order = order - 1\n\n while order > 0:\n result = result.shift(days=1 if not reverse else -1)\n if day == result.weekday():\n order = order - 1\n\n return result\n\n\ndef easter(year, easter_type):\n date = dateutil.easter.easter(year, easter_type)\n return SmartDayArrow(date.year, date.month, date.day)\n\n\ndef month_reference(year, month, first=True):\n months = [\n \"january\",\n \"february\",\n \"march\",\n \"april\",\n \"may\",\n \"june\",\n \"july\",\n \"august\",\n \"september\",\n \"october\",\n \"november\",\n \"december\",\n ]\n\n month = months.index(month.lower()) + 1\n\n if first:\n return SmartDayArrow(year, month, 1)\n else:\n return SmartDayArrow(\n year if month != 12 else year+1,\n month+1 if month != 12 else 1,\n 1).shift(days=-1)\n" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "2f2d985ee411beb5c7df3f4e336c03af722742d9", "content_id": "fb8d36a17bee5493a66ae41298f25510ddf33e27", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': '2. 
pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-21',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "e0d65cceac4b5870df467874bde23fd570051687", "content_id": "9570443bc8d66236a02225b83bd001af263a7b20", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-02',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-13',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5754830837249756, "alphanum_fraction": 0.5990338325500488, "avg_line_length": 31.47058868408203, "blob_id": 
"3e4ab4e0c89e632eb28d9aebf9cc2d238363d874", "content_id": "8343393d492aac191f06afa2e7f830766050b262", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1656, "license_type": "permissive", "max_line_length": 107, "num_lines": 51, "path": "/setup.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nfrom setuptools import find_packages, setup\nimport pathlib\n\nhere = pathlib.Path(__file__).parent.resolve()\n\nlong_description = (here / \"README.md\").read_text(encoding=\"utf-8\")\n\nsetup(\n name=\"holidata\",\n version=\"2023.04.0\",\n description=\"Holidata is a utility for algorithmically producing holidays for a given locale and year\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/GothenburgBitFactory/holidata\",\n author=\"Gothenburg Bit Factory\",\n author_email=\"[email protected]\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3 :: Only\",\n ],\n keywords=\"holiday, calendar\",\n package_dir={\"\": \"src\"},\n packages=find_packages(where=\"src\"),\n include_package_data=True,\n python_requires=\">=3.7, <4\",\n install_requires=[\n \"arrow >= 1.2.2\",\n \"docopt >= 0.6.2\",\n \"python-dateutil >= 2.8.2\",\n ],\n extras_require={\n \"test\": [\n \"pytest >= 7.1.2\",\n \"snapshottest >= 0.6.0\",\n ]\n },\n scripts=[\"bin/holidata\"],\n project_urls={\n \"Bug Reports\": \"https://github.com/GothenburgBitFactory/holidata/issues\",\n \"Source\": \"https://github.com/GothenburgBitFactory/holidata\",\n \"Website\": \"https://holidata.net\",\n },\n)\n" }, { "alpha_fraction": 0.5812949538230896, "alphanum_fraction": 0.6474820375442505, "avg_line_length": 25.730770111083984, "blob_id": "45df690a65e77c99cf54d1058cf66059ab4c3ea5", "content_id": "22a396270c27e7f5551c8ec6a8fe185dd3f049c8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 706, "license_type": "permissive", "max_line_length": 49, "num_lines": 26, "path": "/src/holidata/holidays/pt-PT.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass pt_PT(Locale):\n \"\"\"\n 01-01: [NF] Ano Novo\n 04-25: [NF] Dia da Liberdade\n 05-01: [NF] Dia do Trabalhador\n 06-10: [NF] Dia de Portugal\n 08-15: [NF] Assunção de Nossa Senhora\n 10-05: [NF] Implantação da República\n 11-01: [NF] Dia de Todos os Santos\n 12-01: [NF] Restauração da Independência\n 12-08: [NF] Imaculada Conceição\n 12-25: [NF] Natal\n 47 days before Easter: [NRV] Carnaval\n 2 days before Easter: [NRV] Sexta-feira Santa\n Easter: [NRV] Páscoa\n 60 days after Easter: [NRV] Corpo de Deus\n \"\"\"\n\n locale = \"pt-PT\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.3194199800491333, "alphanum_fraction": 0.3641636371612549, "avg_line_length": 21.24884796142578, "blob_id": "2f7f7c45ec43ed375dd131b61b9565d2888a750d", "content_id": "bbc27564863282c13215da38d0b97fea217a9787", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 9686, "license_type": "permissive", "max_line_length": 68, "num_lines": 434, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-06',\n 'description': 'Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-02-28',\n 'description': 'Día de Andalucía',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2018-03-01',\n 'description': 'Día de las Illes Balears',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'F'\n },\n {\n 'date': '2018-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2018-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Viernes Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': 
'2018-04-02',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2018-04-23',\n 'description': 'Fiesta de Castilla y León',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2018-04-23',\n 'description': 'San Jorge / Día de Aragón',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-02',\n 'description': 'Fiesta de la Comunidad de Madrid',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2018-05-17',\n 'description': 'Día de las Letras Gallegas',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'F'\n },\n {\n 'date': '2018-05-30',\n 'description': 'Día de Canarias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'F'\n },\n {\n 'date': '2018-05-31',\n 'description': 'Día de Castilla-La Mancha',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'F'\n },\n {\n 'date': '2018-06-09',\n 'description': 'Día de la Región de Murcia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'F'\n },\n {\n 'date': '2018-06-09',\n 'description': 'Día de La Rioja',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'F'\n },\n {\n 'date': '2018-07-25',\n 'description': 'Santiago Apóstol / Día Nacional de Galicia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2018-07-28',\n 'description': 'Día de las Instituciones de Cantabria',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'F'\n },\n {\n 'date': '2018-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-08-22',\n 'description': 'Fiesta del Sacrificio (Aid El Kebir)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2018-08-22',\n 'description': 'Fiesta del Sacrificio (Eidul Adha)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2018-09-08',\n 'description': 'Día de Asturias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2018-09-08',\n 'description': 'Día de Extremadura',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2018-09-11',\n 'description': 'Fiesta Nacional de Cataluña',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'F'\n },\n {\n 'date': '2018-09-15',\n 'description': 'La Bien Aparecida',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2018-10-09',\n 'description': 'Día de la Comunitat Valenciana',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'F'\n },\n {\n 'date': '2018-10-12',\n 'description': 'Fiesta Nacional de 
España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-11-01',\n 'description': 'Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-06',\n 'description': 'Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Natividad del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.5955487489700317, "alphanum_fraction": 0.6438986659049988, "avg_line_length": 39.71875, "blob_id": "dfc555b8f91b8974e43a11268210f05ee92a8512", "content_id": "82f48660ca2aa9640dc9142ae644174de7b0a9c6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1310, "license_type": "permissive", "max_line_length": 115, "num_lines": 32, "path": "/src/holidata/holidays/de-CH.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass de_CH(Locale):\n \"\"\"\n 01-01: [NF] Neujahrstag\n 01-02: [BE,JU,TG,VD] [F] Berchtoldstag\n 01-06: [SZ,TI,UR] [RF] Heilige Drei Könige\n 03-19: [NW,SZ,TI,UR,VS] [RF] Josefstag\n 05-01: [BL,BS,GR,NE,SH,TG,TI,ZH] [F] Tag der Arbeit\n 08-01: [NF] Bundesfeier\n 08-15: [AI] [RF] Mariä Himmelfahrt\n 08-15: [JU,LU,NW,OW,SZ,TI,UR,VS,ZG] [RF] Mariä Himmelfahrt\n 11-01: [AI,GL,JU,LU,NW,OW,SG,SZ,TI,UR,VS,ZG] [RF] Allerheiligen\n 12-08: [AI] [RF] Mariä Empfängnis\n 12-08: [LU,NW,OW,SZ,TI,UR,VS,ZG] [RF] Mariä Empfängnis\n 12-25: [NRF] Weihnachtstag\n 12-26: [AI,AR,BE,BL,BS,GL,GR,LU,SG,SH,SZ,TG,TI,UR,ZH] [RF] Stephanstag\n 2 days before Easter: [AG,AI,AR,BE,BL,BS,FR,GE,GL,GR,JU,LU,NE,NW,OW,SG,SH,SO,SZ,TG,UR,VD,ZG,ZH] [RV] Karfreitag\n Easter: [NRV] Ostersonntag\n 1 day after Easter: [AI,AR,BE,BL,BS,GE,GL,GR,JU,SG,SH,SZ,TG,TI,UR,VD,ZH] [RV] Ostermontag\n 39 days after Easter: [NRV] Auffahrt\n 49 days after Easter: [NRV] Pfingstsonntag\n 50 days after Easter: [AI,AR,BE,BL,BS,GE,GL,GR,JU,SG,SH,SZ,TG,TI,UR,VD,ZH] [RV] Pfingstmontag\n 60 days after Easter: [AI,JU,LU,NW,OW,SZ,TI,UR,VS,ZG] [RV] Fronleichnam\n \"\"\"\n\n locale = \"de-CH\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.7228514552116394, "alphanum_fraction": 0.738174557685852, "avg_line_length": 52.60714340209961, "blob_id": "891077f81c99c75a60f93a24125a7bb3429c14cc", "content_id": "b2eb7b4d55bfe0271b49a36bb1490032a054c6ae", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": true, "language": "Markdown", "length_bytes": 1501, "license_type": "permissive", "max_line_length": 194, "num_lines": 28, "path": "/.github/ISSUE_TEMPLATE/locale-request.md", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "---\nname: Locale request\nabout: Request a new locale\ntitle: 'Add locale for LANG-COUNTRY'\nlabels: 'locale'\nassignees: ''\n\n---\n\n**Basic information**\n* What is the language code according to [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes)?\n* What is the country code according to [ISO 3166-2](https://en.wikipedia.org/wiki/ISO_3166-2)?\n\n(please replace `LANG` 
and `COUNTRY` in the issue title accordingly)\n\n* Provide a list of all holidays, i.e. days \"which are defined by law on which business or work are suspended or reduced\"\n* Are holidays moved to a different date, e.g. if they fall on a saturday/sunday/...? If yes, which holidays and what is the algorithm?\n* Are holidays observed on a different date (is there a substitute holiday), e.g. if they fall on a saturday/sunday/...? If yes, which holidays and what is the algorithm?\n* If possible, provide the legal sources which define the holidays.\n\n**For each holiday**\n* What is the official name?\n* Is it a regional or nation-wide holiday?\n* If regional, what are the regions the holiday is observed in? (Note: currently only regions defined in ISO_3166-2 are supported)\n* Is it a fixed or variable date?\n* If fixed, provide the date in format `MM-DD`\n* If a holiday has a variable date: What is the algorithm to calculate it (e.g. `nth <weekday>/<day> in <month>`,...? If it depends on the easter date: Is it the western or orthodox easter date?\n* If the holiday is proclaimed by the government: Can you provide the source?\n" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "2936ec8855703bfd4e06ea4b167379c8337716f5", "content_id": "6f4dbda5cde0f710c9481d09e3bfedc4ab05971c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/ES.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass ES(Country):\n id = \"ES\"\n languages = [\"es\"]\n default_lang = \"es\"\n" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "e3dc2a7db77f6bb9b5e17ca1d6d24ec36f06f00f", "content_id": "49b6a14511c0f9640756555d35da4b4af9914adc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-03-02',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-17',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-18',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-19',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-20',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': 
'2020-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-06-08',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "440db16132cd69d8672d96c58dfc91856996b195", "content_id": "a385cb406ab0f639a94509977be1e57d2ccaa46f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-03-06',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-17',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-21',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-02',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-13',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': 
'',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.6825181245803833, "alphanum_fraction": 0.6843296885490417, "avg_line_length": 37.068965911865234, "blob_id": "f82f018b332001b56f4518b82c6343c8e39e82b8", "content_id": "15a93e07364050f6c679fe639eecc4580af85915", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2208, "license_type": "permissive", "max_line_length": 236, "num_lines": 58, "path": "/tests/test_holidays.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "import re\n\nimport pytest\n\nfrom holidata import Locale, Country\nfrom tests import HOLIDATA_YEAR_MAX\n\n\[email protected](params=range(2011, HOLIDATA_YEAR_MAX))\ndef year(request):\n return request.param\n\n\[email protected](params=Locale.plugins)\ndef locale(request, year):\n return request.param(year)\n\n\[email protected]()\ndef holidays(locale):\n return locale.holidays\n\n\[email protected](params=Country.plugins)\ndef country(request):\n return request.param()\n\n\ndef test_country_should_be_constructable(country):\n pass\n\n\ndef test_holiday_should_not_be_of_type_national_if_region_defined(holidays):\n for holiday in holidays:\n if holiday.region == '':\n assert \"N\" in holiday.flags, \"Holiday '{}' ({}) in locale {} must have flag 'N': it has no regions defined\".format(holiday.description, holiday.date.strftime(\"%Y-%m-%d\"), holiday.locale)\n\n\ndef test_holiday_should_be_of_type_national_if_no_region_defined(holidays):\n for holiday in holidays:\n if holiday.region != '':\n assert \"N\" not in holiday.flags, \"Holiday '{}' ({}) in locale {} must not have flag 'N': it has regions defined\".format(holiday.description, holiday.date.strftime(\"%Y-%m-%d\"), holiday.locale)\n\n\ndef test_holiday_should_be_of_type_either_fixed_or_variable(holidays):\n for holiday in holidays:\n date_is_fixed = \"F\" in holiday.flags\n date_is_variable = \"V\" in holiday.flags\n\n assert not (date_is_variable and date_is_fixed), \"Holiday '{}' ({}) in locale {} must not have both flags 'F' and 'V'\".format(holiday.description, holiday.date.strftime(\"%Y-%m-%d\"), holiday.locale)\n assert (date_is_variable or date_is_fixed), \"Holiday '{}' ({}) in locale {} must have either flag 'F' or 'V'\".format(holiday.description, holiday.date.strftime(\"%Y-%m-%d\"), holiday.locale)\n\n\ndef test_holiday_flags_should_be_in_the_correct_order(holidays):\n for holiday in holidays:\n match = re.search(r\"^N?R?[FV]?$\", \"{}\".format(holiday.flags))\n\n assert match is not None, \"Flags for holiday '{}' ({}) in locale {} are not in the correct order. 
Flags '{}' should match 'N?R?[FV]?'\".format(holiday.description, holiday.date.strftime(\"%Y-%m-%d\"), holiday.locale, holiday.flags)\n" }, { "alpha_fraction": 0.3191857933998108, "alphanum_fraction": 0.36422693729400635, "avg_line_length": 20.79245376586914, "blob_id": "551d840245f3a926991a2935955afc763340a373", "content_id": "03aaf26afa8c80c21f7ff5d5bdf2d5c8194e0f68", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2315, "license_type": "permissive", "max_line_length": 89, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Nova Godina',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-06',\n 'description': 'Sveta tri kralja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Uskrs',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Uskršnji ponedjeljak',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Praznik rada',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-30',\n 'description': 'Dan državnosti',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-31',\n 'description': 'Tijelovo',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-06-22',\n 'description': 'Dan antifašističke borbe',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-05',\n 'description': 'Dan pobjede i domovinske zahvalnosti i Dan hrvatskih branitelja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-15',\n 'description': 'Velika Gospa',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-01',\n 'description': 'Dan svih svetih',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Božić',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Sveti Stjepan',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31389227509498596, "alphanum_fraction": 0.35925474762916565, "avg_line_length": 20.66666603088379, "blob_id": "9428878fa774c204e30e1d9e7074609b7bf85a86", "content_id": "86d0182a248f9ced4b438cfdae9896f89fa05d09", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2469, "license_type": "permissive", "max_line_length": 49, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-17',\n 'description': 'Birthday of Martin Luther King, Jr.',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-02-21',\n 'description': \"Washington's Birthday\",\n 
'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-04-18',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2011-04-18',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2011-05-30',\n 'description': 'Memorial Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-07-04',\n 'description': 'Independence Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-09-05',\n 'description': 'Labor Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-10-10',\n 'description': 'Columbus Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-11-11',\n 'description': 'Veterans Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-11-24',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-11-25',\n 'description': 'Day after Thanksgiving',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-12-24',\n 'description': 'Christmas Eve',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "5ebb24da4e2203999f48621682c8d9e2317b8e1a", "content_id": "2a70de8191cfc8165f4bc31b8ef7c43701d3edbf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-20',\n 'description': 'Nelipühade 1. 
püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "3edf9113987cbf054ff46628a39c26d28dd0b3c1", "content_id": "3d17ee5e8d8968b4976fbc81961210b0618a673b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "93cebb8c7dbbc4b6eb15f461f65a73d17c41eb69", "content_id": "9241f15ba122f66802328e5ff92453184e7e6713", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, 
"path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-03-07',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-23',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-13',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31389227509498596, "alphanum_fraction": 0.35925474762916565, "avg_line_length": 20.66666603088379, "blob_id": "d7e0ac6716d720838c2d7ba5e61c815ab789dfdb", "content_id": "bf827b19482886addc30725087d2a110f34d5b55", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2469, "license_type": "permissive", "max_line_length": 49, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-18',\n 'description': 'Birthday of Martin Luther King, Jr.',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-02-15',\n 'description': \"Washington's Birthday\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-04-19',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2021-04-19',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2021-05-31',\n 'description': 
'Memorial Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-07-04',\n 'description': 'Independence Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-09-06',\n 'description': 'Labor Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-10-11',\n 'description': 'Columbus Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-11-11',\n 'description': 'Veterans Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-11-25',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-11-26',\n 'description': 'Day after Thanksgiving',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Christmas Eve',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30265551805496216, "alphanum_fraction": 0.34921789169311523, "avg_line_length": 20.153846740722656, "blob_id": "23b798126b81cec3cd3d78b4014e21dae38b90ac", "content_id": "6fb495417a427f36e5fd5fee032644efedc7dc78", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2753, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-06',\n 'description': 'Nationaldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-06-19',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-06-20',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-10-31',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 
'date': '2015-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3280318081378937, "alphanum_fraction": 0.3720032870769501, "avg_line_length": 21.624338150024414, "blob_id": "1a5958d44dfee5c01579cbe900e9d389b28188d2", "content_id": "22092f5affd6bd7e6bf9c2e7f12258c291ee2b77", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8586, "license_type": "permissive", "max_line_length": 57, "num_lines": 378, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': \"Jour de l'An\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-02-17',\n 'description': 'Fête de la Famille',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2014-02-17',\n 'description': 'Fête de la Famille',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'ON',\n 'type': 'V'\n },\n {\n 'date': '2014-02-17',\n 'description': 'Fête de la Famille',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2014-02-17',\n 'description': 'Fête de la Famille',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NB',\n 'type': 'V'\n },\n {\n 'date': '2014-02-17',\n 'description': 'Journée Louis Riel',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'MB',\n 'type': 'V'\n },\n {\n 'date': '2014-02-17',\n 'description': 'Fête des Insulaires',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'PE',\n 'type': 'V'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Vendredi Saint',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'PE',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'RV'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'BC',\n 'type': 'V'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'MB',\n 'type': 'V'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NS',\n 'type': 'V'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'ON',\n 'type': 'V'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NT',\n 
'type': 'V'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NU',\n 'type': 'V'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'YT',\n 'type': 'V'\n },\n {\n 'date': '2014-05-19',\n 'description': 'Journée Nationale des Patriotes',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'V'\n },\n {\n 'date': '2014-06-24',\n 'description': 'Fête Nationale',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'F'\n },\n {\n 'date': '2014-07-01',\n 'description': 'Fête du Canada',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-04',\n 'description': \"Premier lundi d'août\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'V'\n },\n {\n 'date': '2014-08-04',\n 'description': \"Premier lundi d'août\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NU',\n 'type': 'V'\n },\n {\n 'date': '2014-08-04',\n 'description': 'Fête du Patrimoine',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2014-08-04',\n 'description': 'Fête de la Saskatchewan',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2014-08-04',\n 'description': 'Jour de la Fondation',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NS',\n 'type': 'V'\n },\n {\n 'date': '2014-08-04',\n 'description': 'Jour du Nouveau-Brunswick',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NB',\n 'type': 'V'\n },\n {\n 'date': '2014-09-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'BC',\n 'type': 'V'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'MB',\n 'type': 'V'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NL',\n 'type': 'V'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'ON',\n 'type': 'V'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'V'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'V'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NU',\n 'type': 'V'\n },\n {\n 'date': '2014-10-13',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'YT',\n 'type': 'V'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Jour du Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'F'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Jour du Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'BC',\n 'type': 'F'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Jour du 
Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NB',\n 'type': 'F'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Jour du Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NL',\n 'type': 'F'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Jour du Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'F'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Jour de Noël',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Lendemain de Noël',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "7b0c554838740c7f1ca5778288d2b992af81cb37", "content_id": "de99bc0cc148d305f2426235ed5f1a73c93db866", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-03-24',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-02',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-23',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-30',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-30',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-18',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-10-13',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-11-03',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-11-17',\n 'description': 'Independencia de 
Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30687829852104187, "alphanum_fraction": 0.35305434465408325, "avg_line_length": 20.224489212036133, "blob_id": "d5be23d5eae3c4ce9b496c6255fd69d4585194af", "content_id": "e48584db57f3a077f67b1a7b234b65f81ce81c87", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2086, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Nouvel An',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Ascension',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-07-21',\n 'description': 'Fête nationale',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-11',\n 'description': \"Jour de l'armistice\",\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Noël',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "ede2885fba4a4a12149a2b215d53cd0923999cd4", "content_id": "b846313480caa7c8a1d2ab865358b7f8800111f1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/IT.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass IT(Country):\n id = \"IT\"\n languages = [\"it\"]\n default_lang = \"it\"\n" }, { "alpha_fraction": 0.31389227509498596, "alphanum_fraction": 0.35925474762916565, "avg_line_length": 20.66666603088379, "blob_id": "ba068699c4aa81fd1cd9f8057fc5abdcd14db7be", "content_id": "ad2e30a4c025f67195cb8a51b0ca1d264f7faa83", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 2469, "license_type": "permissive", "max_line_length": 49, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-01-18',\n 'description': 'Birthday of Martin Luther King, Jr.',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-02-15',\n 'description': \"Washington's Birthday\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-04-18',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2016-04-18',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2016-05-30',\n 'description': 'Memorial Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-07-04',\n 'description': 'Independence Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-09-05',\n 'description': 'Labor Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-10-10',\n 'description': 'Columbus Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-11-11',\n 'description': 'Veterans Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-11-24',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-11-25',\n 'description': 'Day after Thanksgiving',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-12-24',\n 'description': 'Christmas Eve',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3079564869403839, "alphanum_fraction": 0.35374927520751953, "avg_line_length": 20.317073822021484, "blob_id": "9e25dfffd1c678409a90275f71bca5cb69b518b5", "content_id": "55048e94ed495858842af24eef67a26bc3715181", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1747, "license_type": "permissive", "max_line_length": 50, "num_lines": 82, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-02',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': 
'2014-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-02',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-10-27',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "bca59ea179583317bd750df048d26143d2a83efa", "content_id": "6b64ae087d72f1495a9889f7457ee627bcf9ca30", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Nelipühade 1. 
püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3191489279270172, "alphanum_fraction": 0.3645389974117279, "avg_line_length": 20.59183692932129, "blob_id": "1e924450c5a26c6001bc95806ec4aa40c6bff884", "content_id": "e32f08df0f87687457d0d1347513cad1be70e739", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2115, "license_type": "permissive", "max_line_length": 53, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Nieuwjaar',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Pasen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Paasmaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Dag van de arbeid',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Onze Lieve Heer hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Pinksteren',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pinkstermaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-07-21',\n 'description': 'Nationale feestdag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Onze Lieve Vrouw hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-11',\n 'description': 'Wapenstilstand',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Kerstmis',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "828dff85462823e2508f7990792df44c7572366c", "content_id": "3d96740c35efa0fdac013a2d6d9aa0f12fd8cf05", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 
67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Nelipühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "d413c93581aabf54063c397d8bf279d0ecbb82dd", "content_id": "7f02ec27b9d6c0e38bbcc5b171ca916b289c9f7d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/GR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass GR(Country):\n id = \"GR\"\n languages = [\"el\"]\n default_lang = \"el\"\n" }, { "alpha_fraction": 0.5789733529090881, "alphanum_fraction": 0.5888450145721436, "avg_line_length": 28.794116973876953, "blob_id": "0c3268792c861ce16dc4dc277af08010806dce00", "content_id": "0a9f9362509c958811f3bfa037f076bb84520f2e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2026, "license_type": "permissive", "max_line_length": 115, "num_lines": 68, "path": "/bin/holidata", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# encoding: utf-8\n\n\"\"\"\nHolidata - generate holidata files.\n\nUsage:\n holidata (--year=<value>) (--locale=<value>) [--output=<value>]\n holidata (--year=<value>) (--country=<value>) [--lang=<value>] [--output=<value>]\n\nOptions:\n --year=<value>\n Specify which year to generate data for.\n Note: Holidata generates valid data from 2011.\n\n --locale=<id>\n Specify the locale for which data should be generated.\n The locale <id> is a combination of language <id> and country 
<id>.\n\n --country=<id>\n Specify the country for which data should be generated.\n The country <id> has to be from ISO 3166-1 alpha-2.\n\n --lang=<id>\n Specify the language in which data should be generated (requires --country).\n Not needed if the country has a default language defined.\n The language <id> has to be from ISO 639-1.\n\n --output=(csv|json|yaml|xml)\n Specify the output format [default: csv].\n\nDependencies:\n pip3 install arrow docopt\n\"\"\"\nimport re\nimport sys\n\nfrom docopt import docopt\n\nfrom holidata import Holidata\n\nif __name__ == '__main__':\n args = docopt(__doc__)\n\n try:\n if args['--locale'] is not None:\n locale_id = args['--locale']\n locale_regex = re.compile(r'^(?P<lang>[a-zA-Z]{2})[-_](?P<country>[a-zA-Z]{2})$')\n m = locale_regex.search(locale_id)\n\n if m is None:\n raise ValueError(\"'{}' is not a valid locale!\".format(locale_id))\n\n country_id = m.group('country').upper()\n lang_id = m.group('lang').lower()\n\n elif args['--country'] is not None:\n country_id = args['--country'].upper()\n lang_id = args['--lang']\n\n else:\n # When neither '--locale' nor '--country' are given, docopt prints usage\n sys.exit(1)\n\n print(Holidata(country=country_id, language=lang_id, year=args['--year'], output=args['--output']), end=\"\")\n\n except ValueError as e:\n sys.exit(e)\n" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "8ff7d11c33245936b6c58cead4df0e6959e6908e", "content_id": "ccb3f66bc84d90a50762171f5031a012e5b9109f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Nelipühade 1. 
püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "0a9fae7d35487593708726d89a56ee136c3b88ba", "content_id": "9fa60522a17233e64ba235f2e800f56ddd3210da", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-02-19',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-01',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-06',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 
'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31302428245544434, "alphanum_fraction": 0.35894039273262024, "avg_line_length": 20.37735939025879, "blob_id": "4b4642a6f57e7524ec2a2bc4b681b556141636ea", "content_id": "11bedb3e817ce5c3b221c5643ce3ddadd389b0af", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2273, "license_type": "permissive", "max_line_length": 47, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Trettondedagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Långfredagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Påskdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Annandag påsk',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Kristi himmelfärdsdag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Pingst',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-26',\n 'description': 'Midsommardagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-11-06',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-12-06',\n 'description': 'Självständighetsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "92c58ca57883ccd0edcad8fa1745a194e0044327", "content_id": "ed33fac751dd38791343115c120d87f00317a41f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-02-17',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Sexta-feira 
Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-06-04',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Natal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3136601746082306, "alphanum_fraction": 0.3593526780605316, "avg_line_length": 20.4489803314209, "blob_id": "79b9795437d9421d7396ae9843f293c1013ff7e9", "content_id": "ff596ef11d9ffb1049c1bbc641a8d0645ee34d1f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2101, "license_type": "permissive", "max_line_length": 49, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Capodanno',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Epifania',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Pasqua',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Pasquetta',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-25',\n 'description': 'Festa della liberazione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Festa del lavoro',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-02',\n 'description': 'Festa della repubblica',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Assunzione (ferragosto)',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Ognissanti',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Immacolata concezione',\n 'locale': 'it-IT',\n 'notes': 
'',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Natale',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'S.to Stefano',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32024067640304565, "alphanum_fraction": 0.3657028377056122, "avg_line_length": 20.83941650390625, "blob_id": "06235aeb778c9fde498500088814363fb16d64a2", "content_id": "e1a07851b9c3246de63ad309cf1779d51c7eac89", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5987, "license_type": "permissive", "max_line_length": 51, "num_lines": 274, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BW',\n 'type': 'RF'\n },\n {\n 'date': '2015-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BY',\n 'type': 'RF'\n },\n {\n 'date': '2015-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'ST',\n 'type': 'RF'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Karfreitag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Ostern',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Ostermontag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Erster Maifeiertag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Pfingstmontag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-04',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BW',\n 'type': 'RV'\n },\n {\n 'date': '2015-06-04',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BY',\n 'type': 'RV'\n },\n {\n 'date': '2015-06-04',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'HE',\n 'type': 'RV'\n },\n {\n 'date': '2015-06-04',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2015-06-04',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'RP',\n 'type': 'RV'\n },\n {\n 'date': '2015-06-04',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SL',\n 'type': 'RV'\n },\n {\n 'date': '2015-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SL',\n 'type': 'RF'\n },\n {\n 'date': '2015-10-03',\n 'description': 'Tag der Deutschen Einheit',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-10-31',\n 
'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BB',\n 'type': 'RF'\n },\n {\n 'date': '2015-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'MV',\n 'type': 'RF'\n },\n {\n 'date': '2015-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SN',\n 'type': 'RF'\n },\n {\n 'date': '2015-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'ST',\n 'type': 'RF'\n },\n {\n 'date': '2015-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'TH',\n 'type': 'RF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BW',\n 'type': 'RF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BY',\n 'type': 'RF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'RP',\n 'type': 'RF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SL',\n 'type': 'RF'\n },\n {\n 'date': '2015-11-18',\n 'description': 'Buß- und Bettag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SN',\n 'type': 'RV'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Heilig Abend',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Weihnachtstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Zweiter Weihnachtstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-31',\n 'description': 'Silvester',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "2e298af22051ab8f065e1d7f9f11657aab958a93", "content_id": "507d4e68f01eddf2990bcf87051e95bc8f0f5020", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-23',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 
'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-03',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "ac80cd42eb764b4be4cd00e6599d475c1f1d7acd", "content_id": "4de017660f8c1e9d26c408b76a2511932e0af56c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-27',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-10',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-20',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-21',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 
'NRF'\n }\n]" }, { "alpha_fraction": 0.5980795621871948, "alphanum_fraction": 0.672153651714325, "avg_line_length": 24.13793182373047, "blob_id": "2c3c26a2bd6a2e8709a5e638ff8c25ca55b95680", "content_id": "f833314334ddd552d8f02517e611b5dbfe75bd9d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 748, "license_type": "permissive", "max_line_length": 98, "num_lines": 29, "path": "/src/holidata/holidays/et-EE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\"\"\"\nsources\nhttps://www.riigiteataja.ee/akt/109032011007 (Public Holidays and Days of National Importance Act)\n\"\"\"\n\n\nclass et_EE(Locale):\n \"\"\"\n 01-01: [NF] Uusaasta\n 02-24: [NF] Iseseisvuspäev, Eesti Vabariigi aastapäev\n 05-01: [NF] Kevadpüha\n 06-23: [NF] Võidupüha\n 06-24: [NF] Jaanipäev\n 08-20: [NF] Taasiseseisvumispäev\n 12-24: [NF] Jõululaupäev\n 12-25: [NF] Esimene jõulupüha\n 12-26: [NF] Teine jõulupüha\n 2 days before Easter: [NRV] Suur reede\n Easter: [NRV] Ülestõusmispühade 1. püha\n 49 days after Easter: [NRV] Nelipühade 1. püha\n \"\"\"\n\n locale = \"et-EE\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "3231a6a2d90140603eb4fd7180edc7f46a1b5e2b", "content_id": "7add848fa34bff0ad4dc3f65b2d69d79a6b74692", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-06',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-03-23',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-25',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-15',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-22',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-29',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': 
'',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-17',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-10-12',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-11-02',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-11-16',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "df9d0b379b0838e4167399833357816f66e902bf", "content_id": "6ea7aba3454ff5ac64fc666e91ae8af9e4e67339", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-01-11',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-21',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-09',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-30',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-06-06',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-07-04',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 
'type': 'NRV'\n },\n {\n 'date': '2016-10-17',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-11-07',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-11-14',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "978e54d2b11fdd33d5d036d269298ffc18226f07", "content_id": "112003a2a839575717f27018217d27dfe08be6f0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-04-05',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-06',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-04',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "ec41a712f8f38dd32ffb01b85b677a9aa4c4f12e", "content_id": "70e06aaebbe3260de119d2b1282d3e3420ee46aa", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": 
"UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-02-21',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-06',\n 'description': 'Sexta-feira Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-06-07',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Natal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.329084575176239, "alphanum_fraction": 0.37504827976226807, "avg_line_length": 21.719297409057617, "blob_id": "91d453eeb21ddec887778acffac2eaf57e6e477f", "content_id": "88edca3208f584bb19a48e628fd50500ec60043c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2620, "license_type": "permissive", "max_line_length": 65, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-21',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-22',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-23',\n 'description': 'Ramazan Bayramı (3. 
Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-06-28',\n 'description': 'Kurban Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-29',\n 'description': 'Kurban Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-30',\n 'description': 'Kurban Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-07-01',\n 'description': 'Kurban Bayramı (4. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-07-15',\n 'description': 'Demokrasi ve Milli Birlik Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3191489279270172, "alphanum_fraction": 0.3645389974117279, "avg_line_length": 20.59183692932129, "blob_id": "5c9331153c952478f0a7f25185141c14f4accdd5", "content_id": "e852191ff376c754f9707ed79e5d55ed3b690cb1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2115, "license_type": "permissive", "max_line_length": 53, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Nieuwjaar',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Pasen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Paasmaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Dag van de arbeid',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Onze Lieve Heer hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Pinksteren',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pinkstermaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-07-21',\n 'description': 'Nationale feestdag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Onze Lieve Vrouw hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-11',\n 'description': 'Wapenstilstand',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 
'description': 'Kerstmis',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "660f16a00cb6949b5614c462c544b194de792c4c", "content_id": "eb3f992b81cff4ebb37bf534a772fa13f49dd30c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "6375efea03be843531eda388dbf4e300369d1e46", "content_id": "5be8214dc459b2eb9d60a5fecc23b75897a00a8a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/SK.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass SK(Country):\n id = \"SK\"\n languages = [\"sk\"]\n default_lang = \"sk\"\n" }, { "alpha_fraction": 0.3200301229953766, "alphanum_fraction": 0.3652108311653137, "avg_line_length": 20.778688430786133, "blob_id": "738366fe5b9deb3eeb585e1c787f97d0911ddd53", "content_id": "8b6132627a85e046756dd2b708a266b0a02658f8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2663, "license_type": "permissive", "max_line_length": 56, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2019] 1.py", "repo_name": 
"GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-02',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-02-08',\n 'description': 'Prešernov dan',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30265551805496216, "alphanum_fraction": 0.34921789169311523, "avg_line_length": 20.153846740722656, "blob_id": "d8d7f3704e095f9150ef9baaa8f54d45d3502bd9", "content_id": "4a9fd60dd48c73018a713e25d59a6c4a3bbd15fc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2753, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n 
{\n 'date': '2021-05-13',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-06',\n 'description': 'Nationaldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-06-25',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-06-26',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-11-06',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.303668737411499, "alphanum_fraction": 0.35016345977783203, "avg_line_length": 20.184616088867188, "blob_id": "5eac99c1dc6198a7b33370225c1eb71d10fdc34f", "content_id": "608575edd22d1332a6c9eef9bed4568532b6f17c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2757, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-06',\n 'description': 'Nationaldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-21',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-06-22',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-11-02',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 
'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "1ccc514a17a3373c8d843974c50a71622ae4d789", "content_id": "a1ee24a4806294e476a65cff56058840da520444", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-16',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3186638355255127, "alphanum_fraction": 0.3643445670604706, "avg_line_length": 20.852941513061523, "blob_id": "60b0400ca2f0cc989ee9f26c28a713d3d33dc84c", "content_id": "714f679b3e54ae0f507f7e5f7b75c3814570eb6d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 24547, "license_type": "permissive", "max_line_length": 45, "num_lines": 1122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Neujahrstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n 
{\n 'date': '2014-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'F'\n },\n {\n 'date': '2014-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'F'\n },\n {\n 'date': '2014-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'F'\n },\n {\n 'date': '2014-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'F'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AG',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'FR',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NE',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 
'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SO',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Ostersonntag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 
'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'F'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'F'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'F'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NE',\n 'type': 'F'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'F'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'F'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'F'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'F'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Auffahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 
'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RV'\n },\n {\n 'date': '2014-08-01',\n 'description': 'Bundesfeier',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RF'\n },\n {\n 'date': 
'2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Weihnachtstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RF'\n },\n {\n 'date': 
'2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "550e3ce3991801b0611b284493449b23c94198aa", "content_id": "8144b9c211e57e042235ce4671a06b32eebea43a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-02-12',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Sexta-feira Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-30',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Natal',\n 'locale': 
'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.47593581676483154, "alphanum_fraction": 0.47593581676483154, "avg_line_length": 25.714284896850586, "blob_id": "45a3a7c7466756c07d475ae22a1ca714b4b38ccf", "content_id": "599e6d75993fb01c8eaea74f2228f6d2fd507798", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 187, "license_type": "permissive", "max_line_length": 92, "num_lines": 7, "path": "/src/holidata/holidays/CA.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass CA(Country):\n id = \"CA\"\n languages = [\"en\", \"fr\"]\n regions = [\"AB\", \"BC\", \"MB\", \"NB\", \"NL\", \"NS\", \"ON\", \"PE\", \"QC\", \"SK\", \"NT\", \"NU\", \"YT\"]\n" }, { "alpha_fraction": 0.31993502378463745, "alphanum_fraction": 0.3654080331325531, "avg_line_length": 20.61403465270996, "blob_id": "0757bc7d4e9d8eeaa5614fae7aa7308bac0170f6", "content_id": "20acc1751ff70f915b7ab3351a695731c935021e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2463, "license_type": "permissive", "max_line_length": 44, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Nieuwjaarsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Goede Vrijdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Eerste Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Tweede Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-27',\n 'description': 'Koningsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-05-04',\n 'description': 'Dodenherdenking',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-05',\n 'description': 'Bevrijdingsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Hemelvaartsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Eerste Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Tweede Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-12-05',\n 'description': 'Sinterklaas',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-15',\n 'description': 'Koninkrijksdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Eerste Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Tweede Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31302428245544434, "alphanum_fraction": 0.35894039273262024, "avg_line_length": 20.37735939025879, "blob_id": "6d8c215d819fdcee4db7209a3fc8519171b942f8", "content_id": 
"d4f1af4d6f27196f12dff6e747889dec00de7e75", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2273, "license_type": "permissive", "max_line_length": 47, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-06',\n 'description': 'Trettondedagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Långfredagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Påskdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Annandag påsk',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Kristi himmelfärdsdag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Pingst',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-20',\n 'description': 'Midsommardagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-12-06',\n 'description': 'Självständighetsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3218020796775818, "alphanum_fraction": 0.36685439944267273, "avg_line_length": 20.8157901763916, "blob_id": "16c6d0ae8d29e8ccac06be7c43ee2d89f6b40077", "content_id": "34527658e6a1a79df8afa658e95e0b159240b307", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2493, "license_type": "permissive", "max_line_length": 56, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-02-08',\n 'description': 'Prešernov dan',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-01',\n 'description': 
'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.34026622772216797, "alphanum_fraction": 0.3843593895435333, "avg_line_length": 21.688678741455078, "blob_id": "11c2535376405fc6c6e049c9ca1a06b34945fac4", "content_id": "d619f640d77d08eec2786fbe5896d8121f2bf602", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2443, "license_type": "permissive", "max_line_length": 72, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Nový rok',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-01',\n 'description': 'Den obnovy samostatného českého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Velikonoční pondělí',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Svátek práce',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-08',\n 'description': 'Den vítězství',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-07-05',\n 'description': 'Den slovanských věrozvěstů Cyrila a Metoděje',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-07-06',\n 'description': 'Den upálení mistra Jana Husa',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-09-28',\n 'description': 'Den české státnosti',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-10-28',\n 'description': 'Den vzniku samostatného československého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-11-17',\n 'description': 'Den boje za svobodu a demokracii',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Štědrý den',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': '1. 
svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': '2. svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.42281877994537354, "alphanum_fraction": 0.42281877994537354, "avg_line_length": 32.11111068725586, "blob_id": "2378405723df69722e2a15afef2483c0e320fe13", "content_id": "bf56aadc15d8ab607f97d311354e5f5a5cbb7f18", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 298, "license_type": "permissive", "max_line_length": 116, "num_lines": 9, "path": "/src/holidata/holidays/CH.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass CH(Country):\n id = \"CH\"\n languages = [\"de\"]\n default_lang = \"de\"\n regions = [\"AG\", \"AI\", \"AR\", \"BE\", \"BL\", \"BS\", \"FR\", \"GE\", \"GL\", \"GR\", \"JU\", \"LU\", \"NE\", \"NW\", \"OW\", \"SG\", \"SH\",\n \"SO\", \"SZ\", \"TI\", \"TG\", \"UR\", \"VD\", \"VS\", \"ZG\", \"ZH\"]\n" }, { "alpha_fraction": 0.30328208208084106, "alphanum_fraction": 0.35189032554626465, "avg_line_length": 20.305309295654297, "blob_id": "dcc1e87ba3fb0c3efd8f1aed9180628e38131181", "content_id": "254480efeefa98640636e5c5ec13d1a0b89f6532", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4817, "license_type": "permissive", "max_line_length": 49, "num_lines": 226, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'RF'\n },\n {\n 'date': '2020-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '6',\n 'type': 'RF'\n },\n {\n 'date': '2020-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '7',\n 'type': 'RF'\n },\n {\n 'date': '2020-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '8',\n 'type': 'RF'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Karfreitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Ostersonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Ostermontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Staatsfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-04',\n 'description': 'Florian',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '4',\n 'type': 'F'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Pfingstmontag',\n 'locale': 'de-AT',\n 'notes': 
'',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-11',\n 'description': 'Fronleichnam',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-09-24',\n 'description': 'Rupert',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '5',\n 'type': 'F'\n },\n {\n 'date': '2020-10-10',\n 'description': 'Tag der Volksabstimmung',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'F'\n },\n {\n 'date': '2020-10-26',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-11',\n 'description': 'Martin',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '1',\n 'type': 'F'\n },\n {\n 'date': '2020-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '9',\n 'type': 'F'\n },\n {\n 'date': '2020-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '3',\n 'type': 'F'\n },\n {\n 'date': '2020-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-24',\n 'description': 'Heiliger Abend',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Christtag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Stefanitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-31',\n 'description': 'Silvester',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "87bf20aab51eb30d55f70ef859d23f7c842b3bc2", "content_id": "66ffba2bdc5cf87e4b5d013f09d365aacd83eb93", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/HR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass HR(Country):\n id = \"HR\"\n languages = [\"hr\"]\n default_lang = \"hr\"\n" }, { "alpha_fraction": 0.31993502378463745, "alphanum_fraction": 0.3654080331325531, "avg_line_length": 20.61403465270996, "blob_id": "04ad2d9e37963e8562091004ee91e76721ae61c6", "content_id": "c5f95cf81384839babbec4b3d946bfd2d9863523", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2463, "license_type": "permissive", "max_line_length": 44, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Nieuwjaarsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Goede Vrijdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Eerste Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 
'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Tweede Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-04-27',\n 'description': 'Koningsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-05-04',\n 'description': 'Dodenherdenking',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Bevrijdingsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Hemelvaartsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Eerste Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Tweede Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-12-05',\n 'description': 'Sinterklaas',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-15',\n 'description': 'Koninkrijksdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Eerste Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Tweede Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5259159803390503, "alphanum_fraction": 0.5540661215782166, "avg_line_length": 29.657533645629883, "blob_id": "63b959826f3f9b76239249e89da5bfb3d3a45755", "content_id": "8a085b50c92587915c6767b363e8d2c6b69c82e4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2266, "license_type": "permissive", "max_line_length": 82, "num_lines": 73, "path": "/src/holidata/holidays/is-IS.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Holiday, Locale\n\n\nclass is_IS(Locale):\n \"\"\"\n 01-01: [NRF] Nýársdagur\n 05-01: [NF] Verkalýðsdagurinn\n 06-17: [NF] Þjóðhátíðardagurinn\n 12-25: [NRF] Jóladagur\n 12-26: [NRF] Annar dagur jóla\n 3 days before Easter: [NRV] Skírdagur\n 2 days before Easter: [NRV] Föstudagurinn langi\n Easter: [NRV] Páskadagur\n 1 day after Easter: [NRV] Annar dagur páska\n 39 days after Easter: [NRV] Uppstigningardagur\n 49 days after Easter: [NRV] Hvítasunnudagur\n 50 days after Easter: [NRV] Annar dagur hvítasunnu\n 1. 
monday in August: [NV] Frídagur verslunarmanna\n \"\"\"\n\n locale = \"is-IS\"\n easter_type = EASTER_WESTERN\n\n def holiday_first_day_of_summer(self):\n \"\"\"\n Calculate sumardagurinn fyrsti (first day of summer).\n\n The holiday falls on the first Thursday after the 18th of April.\n \"\"\"\n return [\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 4, 18).shift_to_weekday(\"thursday\"),\n description=\"Sumardagurinn fyrsti\",\n flags=\"NV\",\n notes=\"\",\n )\n ]\n\n def holiday_half_days(self):\n \"\"\"\n Define half-day holidays.\n\n Both Christmas Eve (_aðfangadagur jóla_) and New Year's Eve\n (_gamlársdagur_) are public holidays in Iceland from 13:00 only.\n They're included as full-day holidays, but with an explanatory\n note.\n\n 12-24: [NRF] Aðfangadagur jóla\n 12-31: [NF] Gamlársdagur\n \"\"\"\n return [\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 12, 24),\n description=\"Aðfangadagur jóla\",\n flags=\"NRF\",\n notes=\"Holiday from 13:00\",\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 12, 31),\n description=\"Gamlársdagur\",\n flags=\"NF\",\n notes=\"Holiday from 13:00\",\n ),\n ]\n" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "97266a83b65073b1b3fd98d52f7feaa667e59985", "content_id": "c8f1f9b38273a1b70bb77ebbc5d93f85d9af99f8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-07',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-03-25',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-18',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-03',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-24',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-07-01',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-07-01',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-07',\n 
'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-19',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-10-14',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-11-04',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-11-11',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.329084575176239, "alphanum_fraction": 0.37504827976226807, "avg_line_length": 21.719297409057617, "blob_id": "5b2d79a2c59c3ff861b75c613334ba9b3985900a", "content_id": "8106572551361cdd00bd2ef48faf728c70cf35aa", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2620, "license_type": "permissive", "max_line_length": 65, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-05',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-06',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-07',\n 'description': 'Ramazan Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-07-15',\n 'description': 'Demokrasi ve Milli Birlik Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-11',\n 'description': 'Kurban Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-08-12',\n 'description': 'Kurban Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-08-13',\n 'description': 'Kurban Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-08-14',\n 'description': 'Kurban Bayramı (4. 
Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3191857933998108, "alphanum_fraction": 0.36422693729400635, "avg_line_length": 20.79245376586914, "blob_id": "1dce5db1a6c57d87a13c19854a685a9259033286", "content_id": "7f63b00048fae2cec3923f0295e10b2522eafd7e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2315, "license_type": "permissive", "max_line_length": 89, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Nova Godina',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-06',\n 'description': 'Sveta tri kralja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Uskrs',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Uskršnji ponedjeljak',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Praznik rada',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-30',\n 'description': 'Dan državnosti',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-30',\n 'description': 'Tijelovo',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-22',\n 'description': 'Dan antifašističke borbe',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-05',\n 'description': 'Dan pobjede i domovinske zahvalnosti i Dan hrvatskih branitelja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-15',\n 'description': 'Velika Gospa',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Dan svih svetih',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Božić',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Sveti Stjepan',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3191489279270172, "alphanum_fraction": 0.3645389974117279, "avg_line_length": 20.59183692932129, "blob_id": "4836bfc9492ff9460014e2dd2e7bbb2e2cfa662b", "content_id": "0061ac198b997dc0e2ddc0760890df6a7a8a5857", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2115, "license_type": "permissive", "max_line_length": 53, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Nieuwjaar',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': 
'2012-04-08',\n 'description': 'Pasen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Paasmaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Dag van de arbeid',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Onze Lieve Heer hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Pinksteren',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Pinkstermaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-07-21',\n 'description': 'Nationale feestdag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Onze Lieve Vrouw hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-11',\n 'description': 'Wapenstilstand',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Kerstmis',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "dcfa3b6e9d6f32dee51ff68ed13603e0db6d455b", "content_id": "93a6e298fde3083f82f2355a55233ba5b09b2180", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-02-23',\n 'description': 'День защитника Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-04-08',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32104969024658203, "alphanum_fraction": 0.36571747064590454, "avg_line_length": 20.85365867614746, "blob_id": "0ed2fec1ce8971ab98fc3c0412508fd54bbd28f5", "content_id": 
"b1b350c931bd45144d9b3afaa1709e458493d422", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1791, "license_type": "permissive", "max_line_length": 56, "num_lines": 82, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-02',\n 'description': \"New Year's Day (observed)\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Good Friday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-05-08',\n 'description': 'Coronation of King Charles III',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-08-28',\n 'description': 'August Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31208688020706177, "alphanum_fraction": 0.3574126660823822, "avg_line_length": 20.62244987487793, "blob_id": "fb638b271b05e84478e114a11efb800199f3a595", "content_id": "5d1bfdb1ecfa3b6bfe37284d869ae4964cf9dda1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2118, "license_type": "permissive", "max_line_length": 51, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-03',\n 'description': \"New Year's Day (observed)\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-01-04',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-06',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n 
{\n 'date': '2011-10-24',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-27',\n 'description': 'Christmas Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "9c82e3e57b84d69c10937919a2efb52bb0e11de0", "content_id": "ede3475c93bfb0dce2c64a042d84a580cd785b45", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/SI.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass SI(Country):\n id = \"SI\"\n languages = [\"sl\"]\n default_lang = \"sl\"\n" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "04da3c5c2f1b5848550ab75996495f88b4092d7a", "content_id": "413e5fa792506f65b032fa7f4abf750a2949ecb7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32057416439056396, "alphanum_fraction": 0.3652312457561493, "avg_line_length": 21.008771896362305, 
"blob_id": "1c9609ebe9b7c7953215580cbf0f1d29967339e5", "content_id": "34b394a124a934a3c119c420fd40e37be15b24d5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2524, "license_type": "permissive", "max_line_length": 58, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Año Neuvo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-21',\n 'description': 'Cumpleaños de Martin Luther King, Jr.',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-02-18',\n 'description': 'Día del Presidente',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-04-15',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2019-04-15',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2019-05-27',\n 'description': 'Día de los Caídos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-07-04',\n 'description': 'Día de la Independiencia',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-09-02',\n 'description': 'Día del Trabajo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-10-14',\n 'description': 'Día de Columbus',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-11-11',\n 'description': 'Día de los Veteranos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-11-22',\n 'description': 'Día después de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-11-28',\n 'description': 'Día de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-12-24',\n 'description': 'Nochebuena',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Navidad',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "af091aa830426e6fa8d5401c5bf2268a0587c30d", "content_id": "7c02458ffa9fb8f7155dc58c094a00f64047a2f7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-02-14',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-03-28',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Skjærtorsdag',\n 'locale': 
'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "38575cfce0d86eef286f0c03fccebb620afd922f", "content_id": "b011d29230a69806717a74e817c247194ac2d92e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-25',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-08-15',\n 'description': 
'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3190661370754242, "alphanum_fraction": 0.3640294075012207, "avg_line_length": 20.830188751220703, "blob_id": "d036ff42dffb7bf898fa03708729ab27a057e2a9", "content_id": "4ed6d75783361deb035b9e50a137bd9cda042694", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2313, "license_type": "permissive", "max_line_length": 60, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-17',\n 'description': 'Day of Reconciliation (Supplement)',\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30328208208084106, "alphanum_fraction": 0.35189032554626465, "avg_line_length": 20.305309295654297, "blob_id": "261712b05d720fb69800af84477cc67bc5ae714c", "content_id": "52729d99311c98a93fa6dc9c5085253ac2c5f8fc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4817, "license_type": "permissive", "max_line_length": 49, "num_lines": 226, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': 
'2023-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'RF'\n },\n {\n 'date': '2023-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '6',\n 'type': 'RF'\n },\n {\n 'date': '2023-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '7',\n 'type': 'RF'\n },\n {\n 'date': '2023-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '8',\n 'type': 'RF'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Karfreitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Ostersonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Ostermontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Staatsfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-04',\n 'description': 'Florian',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '4',\n 'type': 'F'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Pfingstmontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-08',\n 'description': 'Fronleichnam',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-09-24',\n 'description': 'Rupert',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '5',\n 'type': 'F'\n },\n {\n 'date': '2023-10-10',\n 'description': 'Tag der Volksabstimmung',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'F'\n },\n {\n 'date': '2023-10-26',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Martin',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '1',\n 'type': 'F'\n },\n {\n 'date': '2023-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '9',\n 'type': 'F'\n },\n {\n 'date': '2023-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '3',\n 'type': 'F'\n },\n {\n 'date': '2023-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-24',\n 'description': 'Heiliger Abend',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Christtag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Stefanitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 
'type': 'NF'\n },\n {\n 'date': '2023-12-31',\n 'description': 'Silvester',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5053921341896057, "alphanum_fraction": 0.5823529362678528, "avg_line_length": 29.44776153564453, "blob_id": "d547a1ccd2e6444c34f6f0267c8311246272559c", "content_id": "6e5f41ded6874f62344f5315e20ba45b7a7c477a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2089, "license_type": "permissive", "max_line_length": 89, "num_lines": 67, "path": "/src/holidata/holidays/cs-CZ.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import easter, SmartDayArrow\nfrom .holidays import Locale, Holiday\n\n\"\"\"\nsource: https://www.zakonyprolidi.cz/cs/2000-245, §1 and §2.\n https://www.zakonyprolidi.cz/cs/2000-245/zneni-20200201 (>2020-02-01)\n https://www.zakonyprolidi.cz/cs/2000-245/zneni-20190401 (>2019-04-01)\n https://www.zakonyprolidi.cz/cs/2000-245/zneni-0 (>2000-08-09)\n\"\"\"\n\n\nclass cs_CZ(Locale):\n \"\"\"\n 01-01: [NF] Nový rok\n 01-01: [NF] Den obnovy samostatného českého státu\n 05-01: [NF] Svátek práce\n 05-08: [NF] Den vítězství\n 07-05: [NRF] Den slovanských věrozvěstů Cyrila a Metoděje\n 07-06: [NRF] Den upálení mistra Jana Husa\n 09-28: [NRF] Den české státnosti\n 10-28: [NF] Den vzniku samostatného československého státu\n 12-24: [NRF] Štědrý den\n 12-25: [NRF] 1. svátek vánoční\n 12-26: [NRF] 2. svátek vánoční\n 1 day after Easter: [NRV] Velikonoční pondělí\n \"\"\"\n\n locale = \"cs-CZ\"\n easter_type = EASTER_WESTERN\n\n def holiday_velky_patek(self):\n \"\"\"\n 2 days before Easter: [NRV] Velký pátek\n since 2016\n \"\"\"\n if self.year >= 2016:\n return [Holiday(\n self.locale,\n \"\",\n easter(self.year, self.easter_type).shift(days=-2),\n \"Velký pátek\",\n \"NRV\"\n )]\n\n return []\n\n def holiday_den_boje_za_svobodu_a_demokracii_a_mezinarodni_den_studentstva(self):\n \"\"\"\n 11-17: [NF]\n before 2019-04-01: Den boje za svobodu a demokracii\n before 2019-04-01: Den boje za svobodu a demokracii a Mezinárodní den studentstva\n \"\"\"\n if self.year < 2019:\n name = \"Den boje za svobodu a demokracii\"\n else:\n name = \"Den boje za svobodu a demokracii a Mezinárodní den studentstva\"\n\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 11, 17),\n name,\n \"NF\"\n )]\n" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "14d75232acc5aee62f221f30fb14420782b0c993", "content_id": "3107343f3c3f14912e2a655b5c3e3245ba43b19b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/DK.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass DK(Country):\n id = \"DK\"\n languages = [\"da\"]\n default_lang = \"da\"\n" }, { "alpha_fraction": 0.32822832465171814, "alphanum_fraction": 0.37556561827659607, "avg_line_length": 21.10769271850586, "blob_id": "3ccba7375f48b9fb24ae27aab71400a8d7e28a02", "content_id": "e45ddffd7b22227e96d5ae898f6dae40d2cc21f7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2894, "license_type": "permissive", "max_line_length": 49, 
"num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-04-14',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-21',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-01',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "6ff28c620fd22627f7796c77e7afff66940884c9", "content_id": "a762c03153adcfc0b53a7519a258c80017285ffc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-02-28',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Sexta-feira Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 
'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-15',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Natal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "6eb7e8e44e08d7dff853b6f8582fa3f35a7aa8a8", "content_id": "0ec66d43b995c89da5810ca1eff3facefde196cf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/HU.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass HU(Country):\n id = \"HU\"\n languages = [\"hu\"]\n default_lang = \"hu\"\n" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "ac0f16422695c6617a05f3d6e09709586d766cb6", "content_id": "e6a85a64a790bfbae7b09aa0f64bdff189ede591", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Nelipühade 1. 
püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31302428245544434, "alphanum_fraction": 0.35894039273262024, "avg_line_length": 20.37735939025879, "blob_id": "751e824a719610cbc726077cebe16fcea14f9021", "content_id": "71bb8245928c54f7db751813ca6ba0be82829318", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2273, "license_type": "permissive", "max_line_length": 47, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-06',\n 'description': 'Trettondedagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Långfredagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Påskdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Annandag påsk',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-02',\n 'description': 'Kristi himmelfärdsdag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Pingst',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-25',\n 'description': 'Midsommardagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-11-05',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-12-06',\n 'description': 'Självständighetsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "f54f1eb4da584cd035295daad9aa6c6d0b87e328", "content_id": "cc0c3b168af8c83189571cea62daaf0bcf5087fe", "detected_licenses": [ "MIT" ], "is_generated": 
false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-25',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-09',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-20',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-05',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3322019577026367, "alphanum_fraction": 0.3763258457183838, "avg_line_length": 21.245283126831055, "blob_id": "379dabf00d61080610fd0e5b0b62605b8961bb52", "content_id": "e9d6741d9c8be8862e8c80444f9b418784de597b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2378, "license_type": "permissive", "max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Nowy Rok',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-01-06',\n 'description': 'Trzech Króli',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Wielkanoc',\n 'locale': 'pl-PL',\n 
'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Poniedziałek Wielkanocny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Święto Pracy',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-03',\n 'description': 'Święto Konstytucji Trzeciego Maja',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Zielone Świątki',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-15',\n 'description': 'Boże Ciało',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-08-15',\n 'description': 'Wniebowzięcie Najświętszej Maryi Panny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-11-01',\n 'description': 'Wszystkich Świętych',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-11-11',\n 'description': 'Narodowe Święto Niepodległości',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Boże Narodzenie (pierwszy dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Boże Narodzenie (drugi dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "9a2d3735e55635dd0afd3a15bd3dbe8456853c68", "content_id": "36b922f4553f41e863500d62b96816ecfcafbaff", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-09',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-03-20',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-06',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-22',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-12',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-19',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-07-03',\n 
'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-21',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-10-16',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-11-06',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-11-13',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3136403262615204, "alphanum_fraction": 0.3588184118270874, "avg_line_length": 20.726415634155273, "blob_id": "114a8553660aebbbeb0395f46eba31aec7bd27c4", "content_id": "f351e6d36d0783f89306003cd97995f5cf1863bf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2302, "license_type": "permissive", "max_line_length": 51, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-03',\n 'description': \"New Year's Day (observed)\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-01-04',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-02-07',\n 'description': 'Waitangi Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-06',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-10-24',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-27',\n 'description': 'Christmas 
Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.31594958901405334, "alphanum_fraction": 0.36114731431007385, "avg_line_length": 20.716981887817383, "blob_id": "b369b7a3882736297878ed1e3b7a549a972af000", "content_id": "8a03be3503090cf182fd0026aa6585acfecfc914", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2301, "license_type": "permissive", "max_line_length": 48, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-17',\n 'description': 'Youth Day (Supplement)',\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.34026622772216797, "alphanum_fraction": 0.3843593895435333, "avg_line_length": 21.688678741455078, "blob_id": "792f07dd833f73319dc51f123313f89a68720239", "content_id": "749516c4f60a149d8614f1e8336bad06b6bd682e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2443, "license_type": "permissive", "max_line_length": 72, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Nový rok',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-01',\n 'description': 'Den obnovy samostatného českého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Velikonoční pondělí',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 
'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Svátek práce',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-08',\n 'description': 'Den vítězství',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-07-05',\n 'description': 'Den slovanských věrozvěstů Cyrila a Metoděje',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-07-06',\n 'description': 'Den upálení mistra Jana Husa',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-09-28',\n 'description': 'Den české státnosti',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-10-28',\n 'description': 'Den vzniku samostatného československého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-11-17',\n 'description': 'Den boje za svobodu a demokracii',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-24',\n 'description': 'Štědrý den',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': '1. svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': '2. svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31993502378463745, "alphanum_fraction": 0.3654080331325531, "avg_line_length": 20.61403465270996, "blob_id": "0a2ac6b6668ab392dd926c05335461d42d5fc884", "content_id": "c1975121073aa7f467467913cc8be0667110e4d9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2463, "license_type": "permissive", "max_line_length": 44, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nieuwjaarsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Goede Vrijdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Eerste Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Tweede Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-27',\n 'description': 'Koningsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-05-04',\n 'description': 'Dodenherdenking',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-05',\n 'description': 'Bevrijdingsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Hemelvaartsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Eerste Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Tweede Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-12-05',\n 'description': 'Sinterklaas',\n 'locale': 'nl-NL',\n 'notes': '',\n 
'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-15',\n 'description': 'Koninkrijksdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Eerste Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Tweede Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30687829852104187, "alphanum_fraction": 0.35305434465408325, "avg_line_length": 20.224489212036133, "blob_id": "c8c4a48c228744df59ce8a7b33728d054c7ae3bf", "content_id": "b20e1b32b6d11e2e961fb74f895a46be719057ee", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2086, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Nouvel An',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-02',\n 'description': 'Ascension',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-13',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-07-21',\n 'description': 'Fête nationale',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-11',\n 'description': \"Jour de l'armistice\",\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Noël',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.6671949028968811, "alphanum_fraction": 0.6719492673873901, "avg_line_length": 29.047618865966797, "blob_id": "73aece1d8aafb050526c2aff15408ec3ef7d41e6", "content_id": "99f3c4d9e08008f4b43c12e97b8c93bd9f9d56e9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1262, "license_type": "permissive", "max_line_length": 82, "num_lines": 42, "path": "/tests/test_holidata.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "import pytest\nfrom snapshottest.file import FileSnapshot\nfrom snapshottest.formatter import Formatter\n\nfrom holidata import Locale\nfrom tests import HOLIDATA_YEAR_MAX\n\nSNAPSHOT_FILE_PATH_PATTERN = \"snapshots/snap_test_holidata/{}[{}-{}] 1.py\"\n\n\[email protected](params=range(2011, HOLIDATA_YEAR_MAX))\ndef year(request):\n 
return request.param\n\n\[email protected](params=Locale.plugins)\ndef locale(request, year):\n return request.param(year)\n\n\ndef test_holidata_produces_holidays_for_locale_and_year(snapshot, tmpdir, locale):\n temp_file = tmpdir.join(\"{}.{}.py\".format(locale.locale, locale.year))\n\n export_data = [h.as_dict() for h in locale.holidays]\n export_data.sort(key=lambda x: x[\"date\"])\n temp_file.write(Formatter().format(export_data, 0))\n\n try:\n snapshot.assert_match(FileSnapshot(str(temp_file)))\n except AssertionError:\n with open(temp_file, \"r\") as tf:\n actual = \"\".join(tf.readlines())\n\n snapshot_file = SNAPSHOT_FILE_PATH_PATTERN.format(\n \"test_holidata_produces_holidays_for_locale_and_year\",\n locale.__class__.__name__,\n locale.year)\n\n with open(snapshot_file) as sf:\n expected = \"\".join(sf.readlines())\n\n assert (actual == expected)\n" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "d59585c5b9eb48f3fd93d16db2f8c103f3c09e54", "content_id": "e39ada047baeef6d95a1c334a6ab1d6650dd3c65", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': '2. 
pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-22',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-11-02',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "81c85708ee89e275b260eb0112f7bb49ccaeac82", "content_id": "0d2ebd27d78647e145df34d171e0b663273df1d4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": 
"ad60cb33455380e68c1b27c836f092f0a38ba3a4", "content_id": "d662aed2aa313c5746c7b8f207cb4583ef1d7380", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-10',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-03-21',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-21',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-06',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-27',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-07-04',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-07-04',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-10-17',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-11-07',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-11-14',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3136601746082306, "alphanum_fraction": 0.3593526780605316, "avg_line_length": 20.4489803314209, "blob_id": "03f61b837f1e5c8d9d32f5614c09d81e8982c36f", "content_id": "5d6ee5ca3bb424e2f97083383acdcd6e291d1761", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2101, "license_type": "permissive", "max_line_length": 49, "num_lines": 
98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Capodanno',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-06',\n 'description': 'Epifania',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Pasqua',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Pasquetta',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-25',\n 'description': 'Festa della liberazione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Festa del lavoro',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-02',\n 'description': 'Festa della repubblica',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Assunzione (ferragosto)',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Ognissanti',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Immacolata concezione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Natale',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'S.to Stefano',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "ff98d46c12c71f905a827d8a4a2986ee68127e99", "content_id": "b8f9b914256892904e2e8e9aec83aad63f65192e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-02-15',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-03-29',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-02',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-08',\n 
'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.335544615983963, "alphanum_fraction": 0.379011869430542, "avg_line_length": 21.940580368041992, "blob_id": "f7d11130984d0c73cea592583fe0a3bc033fb0d9", "content_id": "7d5a1dcc558df04928830e2aed62e604bc46ffd5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15901, "license_type": "permissive", "max_line_length": 76, "num_lines": 690, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-06',\n 'description': 'Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-02-28',\n 'description': 'Día de Andalucía',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2020-03-13',\n 'description': 'Estatuto de Autonomía de la Ciudad de Melilla',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'F'\n },\n {\n 'date': '2020-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RF'\n },\n {\n 'date': '2020-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2020-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2020-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RF'\n },\n {\n 'date': '2020-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RF'\n },\n {\n 'date': '2020-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 
'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Viernes Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2020-04-23',\n 'description': 'Fiesta de Castilla y León',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2020-04-23',\n 'description': 'San Jorge / Día de Aragón',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 
'RF'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-02',\n 'description': 'Fiesta de la Comunidad de Madrid',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2020-05-30',\n 'description': 'Día de Canarias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'F'\n },\n {\n 'date': '2020-06-09',\n 'description': 'Día de la Región de Murcia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'F'\n },\n {\n 'date': '2020-06-09',\n 'description': 'Día de La Rioja',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'F'\n },\n {\n 'date': '2020-06-11',\n 'description': 'Corpus Christi',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2020-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n },\n {\n 'date': '2020-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2020-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n },\n {\n 'date': '2020-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RF'\n },\n {\n 'date': '2020-07-25',\n 'description': 'Santiago Apóstol / Día Nacional de Galicia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2020-07-28',\n 'description': 'Día de las Instituciones de Cantabria',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'F'\n },\n {\n 'date': '2020-07-31',\n 'description': 'Fiesta del Sacrificio (Aid El Kebir)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2020-07-31',\n 'description': 'Fiesta del Sacrificio (Eidul Adha)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2020-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-09-02',\n 'description': 'Día de Ceuta',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'F'\n },\n {\n 'date': '2020-09-08',\n 'description': 'Día de Asturias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2020-09-08',\n 'description': 'Día de Extremadura',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2020-09-11',\n 'description': 'Fiesta Nacional de Cataluña',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'F'\n },\n {\n 'date': '2020-09-15',\n 'description': 'La Bien Aparecida',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-09',\n 'description': 'Día de la Comunitat Valenciana',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'F'\n },\n {\n 'date': '2020-10-12',\n 'description': 'Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-02',\n 'description': 'Lunes siguiente a Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2020-11-02',\n 'description': 'Lunes siguiente a Todos los Santos',\n 
'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'F'\n },\n {\n 'date': '2020-11-02',\n 'description': 'Lunes siguiente a Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2020-11-02',\n 'description': 'Lunes siguiente a Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2020-11-02',\n 'description': 'Lunes siguiente a Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2020-11-02',\n 'description': 'Lunes siguiente a Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2020-12-06',\n 'description': 'Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'F'\n },\n {\n 'date': '2020-12-07',\n 'description': 'Lunes siguiente al Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'F'\n },\n {\n 'date': '2020-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Natividad del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'San 
Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.6129870414733887, "alphanum_fraction": 0.65064936876297, "avg_line_length": 29.799999237060547, "blob_id": "9af3f28623200f5f2ce03879d12fa1e25d89fa7d", "content_id": "0f5ccf1da183e2918a190266721294413d03e799", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 785, "license_type": "permissive", "max_line_length": 64, "num_lines": 25, "path": "/src/holidata/holidays/es-US.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass es_US(Locale):\n \"\"\"\n 01-01: [NF] Año Neuvo\n 07-04: [NF] Día de la Independiencia\n 11-11: [NF] Día de los Veteranos\n 12-24: [NRF] Nochebuena\n 12-25: [NRF] Navidad\n 3. monday in January: [NV] Cumpleaños de Martin Luther King, Jr.\n 3. monday in February: [NV] Día del Presidente\n 3. monday in April: [MA,ME] [V] Día del Patriota\n 1. last monday in May: [NV] Día de los Caídos\n 1. monday in September: [NV] Día del Trabajo\n 2. monday in October: [NV] Día de Columbus\n 4. thursday in November: [NV] Día de Acción de Gracias\n 4. friday in November: [NV] Día después de Acción de Gracias\n \"\"\"\n\n locale = \"es-US\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.32057416439056396, "alphanum_fraction": 0.3652312457561493, "avg_line_length": 21.008771896362305, "blob_id": "11c311dda2408a5f9ef224ffc8b9a3d0977d5e08", "content_id": "44363283176b8b34e0a146fe5b2a1d2b2ff559c0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2524, "license_type": "permissive", "max_line_length": 58, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Año Neuvo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-20',\n 'description': 'Cumpleaños de Martin Luther King, Jr.',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-02-17',\n 'description': 'Día del Presidente',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2014-05-26',\n 'description': 'Día de los Caídos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-07-04',\n 'description': 'Día de la Independiencia',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-09-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-10-13',\n 'description': 'Día de Columbus',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Día de los Veteranos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-11-27',\n 'description': 'Día de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 
'type': 'NV'\n },\n {\n 'date': '2014-11-28',\n 'description': 'Día después de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-12-24',\n 'description': 'Nochebuena',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Navidad',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3156331181526184, "alphanum_fraction": 0.37207576632499695, "avg_line_length": 21.081966400146484, "blob_id": "58b04cdadef961deb2d690e0090c216830809898", "content_id": "b694b320288df47a7ad3372457f8a5ed573a8455", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2719, "license_type": "permissive", "max_line_length": 54, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-02',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2015-01-10 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-21',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2015-08-08 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2015-12-12 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3104265332221985, "alphanum_fraction": 0.35592415928840637, "avg_line_length": 20.540817260742188, "blob_id": "323d0810fc911df3db2ef998a17e0743ecb405aa", "content_id": "ecaf25a5d259b8327347b79d0a795616f3f4d702", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2110, "license_type": "permissive", 
"max_line_length": 50, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-02',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-27',\n 'description': 'ANZAC Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-06-01',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-10-26',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-28',\n 'description': 'Boxing Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.32057416439056396, "alphanum_fraction": 0.3652312457561493, "avg_line_length": 21.008771896362305, "blob_id": "16d9e7ec50de0f49b3bd8a8889221aaa500703ee", "content_id": "b94821a5574353c4d1721800b98a256dae7e2e7b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2524, "license_type": "permissive", "max_line_length": 58, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Año Neuvo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-21',\n 'description': 'Cumpleaños de Martin Luther King, Jr.',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-02-18',\n 'description': 'Día del Presidente',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-04-15',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2013-04-15',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2013-05-27',\n 'description': 'Día de los Caídos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-07-04',\n 'description': 'Día de la Independiencia',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-09-02',\n 'description': 'Día del Trabajo',\n 
'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-10-14',\n 'description': 'Día de Columbus',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-11-11',\n 'description': 'Día de los Veteranos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-11-22',\n 'description': 'Día después de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-11-28',\n 'description': 'Día de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-12-24',\n 'description': 'Nochebuena',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Navidad',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31798437237739563, "alphanum_fraction": 0.3782218396663666, "avg_line_length": 21.428571701049805, "blob_id": "826e232171e5fbe6eb2e70ac7962221f0bd15ab7", "content_id": "66aff3916cf09302ad0cfbcee57fb6c5b2e1baf2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3483, "license_type": "permissive", "max_line_length": 54, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-16',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2018-03-10 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Nagypéntek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-30',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2018-04-21 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-20',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-21',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-10-22',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2018-10-13 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-02',\n 'description': 'Munkaszüneti 
nap',\n 'locale': 'hu-HU',\n 'notes': '2018-11-10 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-24',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2018-12-01 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-31',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2018-12-15 munkanap',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.321296364068985, "alphanum_fraction": 0.36611029505729675, "avg_line_length": 21.315942764282227, "blob_id": "d229d2b089946103fc345045b5558ad50f6bc477", "content_id": "35105e6cc65130ace58750043b7e6ddf91c30ad3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15432, "license_type": "permissive", "max_line_length": 68, "num_lines": 690, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-01-06',\n 'description': 'Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-02-29',\n 'description': 'Día de Andalucía',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2016-03-01',\n 'description': 'Día de las Illes Balears',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'F'\n },\n {\n 'date': '2016-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2016-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RF'\n },\n {\n 'date': '2016-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': 
'',\n 'region': 'GA',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Viernes Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2016-04-23',\n 'description': 'Fiesta de Castilla y León',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2016-04-23',\n 'description': 'San Jorge / Día de Aragón',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Lunes siguiente a la Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Lunes siguiente a la Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'F'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Lunes siguiente a la Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Lunes siguiente a la Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Lunes siguiente a la Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'F'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Lunes siguiente a la Fiesta del Trabajo',\n 'locale': 
'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2016-05-02',\n 'description': 'Lunes siguiente a la Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Lunes de Pascua Granada',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'F'\n },\n {\n 'date': '2016-05-17',\n 'description': 'Día de las Letras Gallegas',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'F'\n },\n {\n 'date': '2016-05-26',\n 'description': 'Corpus Christi',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2016-05-30',\n 'description': 'Día de Canarias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'F'\n },\n {\n 'date': '2016-05-31',\n 'description': 'Día de Castilla-La Mancha',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'F'\n },\n {\n 'date': '2016-06-09',\n 'description': 'Día de la Región de Murcia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'F'\n },\n {\n 'date': '2016-06-09',\n 'description': 'Día de La Rioja',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'F'\n },\n {\n 'date': '2016-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n },\n {\n 'date': '2016-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2016-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RF'\n },\n {\n 'date': '2016-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RF'\n },\n {\n 'date': '2016-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RF'\n },\n {\n 'date': '2016-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RF'\n },\n {\n 'date': '2016-07-25',\n 'description': 'Santiago Apóstol / Día Nacional de Galicia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2016-07-28',\n 'description': 'Día de las Instituciones de Cantabria',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'F'\n },\n {\n 'date': '2016-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-09-02',\n 'description': 'Día de Ceuta',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'F'\n },\n {\n 'date': '2016-09-08',\n 'description': 'Día de Asturias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2016-09-08',\n 'description': 'Día de Extremadura',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2016-09-12',\n 'description': 'Fiesta del Sacrificio (Aid El Kebir)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2016-09-12',\n 'description': 'Fiesta del Sacrificio (Eidul Adha)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2016-09-15',\n 'description': 'La Bien Aparecida',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2016-10-07',\n 'description': '80º aniversario del primer Gobierno Vasco',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'F'\n },\n {\n 'date': 
'2016-10-12',\n 'description': 'Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-11-01',\n 'description': 'Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-06',\n 'description': 'Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Natividad del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.30687829852104187, "alphanum_fraction": 0.35305434465408325, "avg_line_length": 20.224489212036133, "blob_id": "d785b518e305bcc02467449139e1bfe2a07b214a", "content_id": "7e1f49cea3196dce3abfe9e8629dfadc84694738", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2086, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Nouvel An',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Pâques',\n 'locale': 'fr-BE',\n 
'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-10',\n 'description': 'Ascension',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-20',\n 'description': 'Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-21',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-07-21',\n 'description': 'Fête nationale',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-11',\n 'description': \"Jour de l'armistice\",\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Noël',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31389227509498596, "alphanum_fraction": 0.35925474762916565, "avg_line_length": 20.66666603088379, "blob_id": "9263c3cdbf92b6c147c6b666698fa2741a7ff092", "content_id": "bdf50f4390ef0cd52185ffa1c2781a079b130cd7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2469, "license_type": "permissive", "max_line_length": 49, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-16',\n 'description': 'Birthday of Martin Luther King, Jr.',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2012-02-20',\n 'description': \"Washington's Birthday\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2012-04-16',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2012-04-16',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Memorial Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2012-07-04',\n 'description': 'Independence Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-09-03',\n 'description': 'Labor Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2012-10-08',\n 'description': 'Columbus Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2012-11-11',\n 'description': 'Veterans Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-11-22',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': 
'2012-11-23',\n 'description': 'Day after Thanksgiving',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2012-12-24',\n 'description': 'Christmas Eve',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3125758171081543, "alphanum_fraction": 0.3643348217010498, "avg_line_length": 20.701753616333008, "blob_id": "823079761a873627de6c4c34b70b21096590c230", "content_id": "3dbccafd8ce66cabdf28878aa57c1f4ba8c1663d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2498, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Nagypéntek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2021-12-11 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5535872578620911, "alphanum_fraction": 0.599645733833313, "avg_line_length": 28.710525512695312, "blob_id": "1e45ac60088e7def98d3d7feffd5c4d809a064cf", "content_id": "dcbfc6c46c434efb2bb9162515670ccb48b6d216", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1140, "license_type": "permissive", "max_line_length": 105, "num_lines": 38, "path": "/src/holidata/holidays/hr-HR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom 
holidata.utils import SmartDayArrow\nfrom .holidays import Locale, Holiday\n\n\nclass hr_HR(Locale):\n \"\"\"\n 01-01: [NF] Nova Godina\n 01-06: [NRF] Sveta tri kralja\n 05-01: [NF] Praznik rada\n 05-30: [NF] Dan državnosti\n 06-22: [NF] Dan antifašističke borbe\n 08-05: [NF] Dan pobjede i domovinske zahvalnosti i Dan hrvatskih branitelja\n 08-15: [NRF] Velika Gospa\n 11-01: [NRF] Dan svih svetih\n 12-25: [NRF] Božić\n 12-26: [NRF] Sveti Stjepan\n Easter: [NRV] Uskrs\n 1 day after Easter: [NRV] Uskršnji ponedjeljak\n 60 days after Easter: [NRV] Tijelovo\n \"\"\"\n\n locale = \"hr-HR\"\n easter_type = EASTER_WESTERN\n\n def holiday_dan_sjecanja_na_zrtve_domovinskog_rata_i_dan_sjecanja_na_zrtvu_vukovara_i_skabrnje(self):\n if self.year >= 2020:\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 11, 18),\n \"Dan sjećanja na žrtve Domovinskog rata i Dan sjećanja na žrtvu Vukovara i Škabrnje\",\n \"NF\"\n )]\n else:\n return []\n" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "1a73e95ff67bc8c478d7ff29bdc26d530b9cf325", "content_id": "dfa1443725f468a05df64b8bf19e56a10b39bd75", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "6ad464235c9c7be054580acc1831143bcead3c12", "content_id": "efad658fa83f1f78a06998c035c7dd4a780c984d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, 
"license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3191489279270172, "alphanum_fraction": 0.3645389974117279, "avg_line_length": 20.59183692932129, "blob_id": "d2c551d66a8482e7f0a17eef37eaa83375beeda6", "content_id": "1469ab210afb7cdd5d9bf0d8168cee2388dbdc87", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2115, "license_type": "permissive", "max_line_length": 53, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Nieuwjaar',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Pasen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Paasmaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Dag van de arbeid',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Onze Lieve Heer hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Pinksteren',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Pinkstermaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-07-21',\n 'description': 'Nationale feestdag',\n 'locale': 'nl-BE',\n 'notes': '',\n 
'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-15',\n 'description': 'Onze Lieve Vrouw hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-11',\n 'description': 'Wapenstilstand',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Kerstmis',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30687829852104187, "alphanum_fraction": 0.35305434465408325, "avg_line_length": 20.224489212036133, "blob_id": "0f779c5bd6bae34f03725c47f9d706397f08a4e5", "content_id": "3f0e8a5ca81680a3b65b88cc62f098781e55b518", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2086, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Nouvel An',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Ascension',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-07-21',\n 'description': 'Fête nationale',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-11',\n 'description': \"Jour de l'armistice\",\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Noël',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32693910598754883, "alphanum_fraction": 0.37322768568992615, "avg_line_length": 21.632076263427734, "blob_id": "ae5349e15945d20001e425fcceed1971de1671a6", "content_id": "f0e33dc902cdee3ece9fd79059e25a9f2595bf15", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2427, "license_type": "permissive", "max_line_length": 65, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 
'type': 'NF'\n },\n {\n 'date': '2013-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-08',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-08-09',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-08-10',\n 'description': 'Ramazan Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-10-15',\n 'description': 'Kurban Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-10-16',\n 'description': 'Kurban Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-10-17',\n 'description': 'Kurban Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-10-18',\n 'description': 'Kurban Bayramı (4. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "72b6bde7cb0c13a396612e1d41c2674f4daafe8f", "content_id": "b38135c01799672fc2701db6ddba482ec5f0e868", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/NL.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass NL(Country):\n id = \"NL\"\n languages = [\"nl\"]\n default_lang = \"nl\"\n" }, { "alpha_fraction": 0.3381712734699249, "alphanum_fraction": 0.3817126154899597, "avg_line_length": 21.598360061645508, "blob_id": "2b3724bb5a1398dc12a0cafc279f74bca7a54809", "content_id": "856c4ca25faab0ab3f9a6dad4619ad4a4134e363", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2790, "license_type": "permissive", "max_line_length": 58, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Deň vzniku Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Zjavenie Pána / Traja králi',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Veľký piatok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Veľkonočný pondelok',\n 'locale': 
'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Sviatok práce',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-08',\n 'description': 'Deň víťazstva nad fašizmom',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-07-05',\n 'description': 'Sviatok svätého Cyrila a Metoda',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-08-29',\n 'description': 'Výročie SNP',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-09-01',\n 'description': 'Deň Ústavy Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-09-15',\n 'description': 'Sedembolestná Panna Mária',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Sviatok všetkých svätých',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-17',\n 'description': 'Deň boja za slobodu a demokraciu',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-24',\n 'description': 'Štedrý deň',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Prvý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Druhý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "5a859812aecc39021e7cb39cd08cc6277c9504ba", "content_id": "2f97978d68603b999c6346840a0e08eee4d7a304", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-02-23',\n 'description': 'День защитника Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-16',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 
0.30907854437828064, "alphanum_fraction": 0.3566814064979553, "avg_line_length": 20.31884002685547, "blob_id": "8b058d085f3a9d5c63a409d17684866397ec3f19", "content_id": "1a22131962d373d7fa4b9bbd54b600d35f5bc922", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2949, "license_type": "permissive", "max_line_length": 47, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-02-07',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-20',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-24',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.6034271717071533, "alphanum_fraction": 0.6780905723571777, "avg_line_length": 27.172412872314453, "blob_id": "93295cfbac885539141ff3c4f732c40b1a3186fe", "content_id": "38863852b4813a4cc7c26da7adcccdc74f514865", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 840, "license_type": "permissive", "max_line_length": 85, "num_lines": 29, "path": "/src/holidata/holidays/pl-PL.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\"\"\"\nsource: 
http://prawo.sejm.gov.pl/isap.nsf/download.xsp/WDU20150000090/O/D20150090.pdf\n\"\"\"\n\n\nclass pl_PL(Locale):\n \"\"\"\n 01-01: [NF] Nowy Rok\n 01-06: [NRF] Trzech Króli\n 05-01: [NF] Święto Pracy\n 05-03: [NF] Święto Konstytucji Trzeciego Maja\n 08-15: [NRF] Wniebowzięcie Najświętszej Maryi Panny\n 11-01: [NRF] Wszystkich Świętych\n 11-11: [NF] Narodowe Święto Niepodległości\n 12-25: [NRF] Boże Narodzenie (pierwszy dzień)\n 12-26: [NRF] Boże Narodzenie (drugi dzień)\n Easter: [NRV] Wielkanoc\n 1 day after Easter: [NRV] Poniedziałek Wielkanocny\n 49 days after Easter: [NRV] Zielone Świątki\n 60 days after Easter: [NRV] Boże Ciało\n \"\"\"\n\n locale = \"pl-PL\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.31640625, "alphanum_fraction": 0.3615451455116272, "avg_line_length": 20.745283126831055, "blob_id": "ccc10d03ef71a8e35bdcd4469b91e5d6f1621e8c", "content_id": "544e14b4d422dafc19ed3fa5399907a6db80d03d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2304, "license_type": "permissive", "max_line_length": 51, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-02',\n 'description': \"Worker's Day (Supplement)\",\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.543183445930481, "alphanum_fraction": 0.6231263279914856, "avg_line_length": 29.45652198791504, "blob_id": "32b4936bc7123a4053bcc0f348cbeb85b4e6803e", "content_id": "e61bfd6e2d4fce8bf44c96125d29fe0ce364a532", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1408, "license_type": "permissive", "max_line_length": 111, "num_lines": 46, "path": 
"/src/holidata/holidays/sl-SI.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Locale, Holiday\n\n\"\"\"\nsources: http://www.pisrs.si/Pis.web/pregledPredpisa?id=ZAKO865#\n\"\"\"\n\n\nclass sl_SI(Locale):\n \"\"\"\n 01-01: [NF] Novo leto\n 02-08: [NF] Prešernov dan\n 04-27: [NF] Dan upora proti okupatorju\n 05-01: [NF] Praznik dela\n 05-02: [NF] Praznik dela\n 06-25: [NF] Dan državnosti\n 08-15: [NRF] Marijino vnebovzetje\n 10-31: [NRF] Dan reformacije\n 11-01: [NF] Dan spomina na mrtve\n 12-25: [NF] Božič\n 12-26: [NF] Dan samostojnosti in enotnosti\n Easter: [NRV] Velikonočna nedelja\n 1 day after Easter: [NRV] Velikonočni ponedeljek\n 50 days after Easter: [NRV] Binkošti\n \"\"\"\n\n locale = \"sl-SI\"\n easter_type = EASTER_WESTERN\n\n def holiday_novo_leto(self):\n \"\"\"\n From 1955 until May 2012, when the National Assembly of Slovenia passed the Public Finance Balance Act,\n 2 January was a work-free day. It was reintroduced in 2017.\n 2012<: https://www.uradni-list.si/1/objava.jsp?sop=2012-01-1700\n 2016<: https://www.uradni-list.si/1/objava.jsp?sop=2016-01-3568\n \"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 1, 2),\n \"Novo leto\",\n \"NF\"\n )] if self.year not in [2013, 2014, 2015, 2016] else []\n" }, { "alpha_fraction": 0.3186638355255127, "alphanum_fraction": 0.3643445670604706, "avg_line_length": 20.852941513061523, "blob_id": "aaf840139f438e45b23b9f5afb6f84da45e54c5a", "content_id": "cfbab2ad787b772a801143528e4930bf9427e044", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 24547, "license_type": "permissive", "max_line_length": 45, "num_lines": 1122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Neujahrstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'F'\n },\n {\n 'date': '2021-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'F'\n },\n {\n 'date': '2021-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'F'\n },\n {\n 'date': '2021-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'F'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 
'date': '2021-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2021-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AG',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'FR',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NE',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SO',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Ostersonntag',\n 'locale': 'de-CH',\n 'notes': '',\n 
'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'F'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'F'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'F'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NE',\n 'type': 'F'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'F'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'F'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'F'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'F'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Auffahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': 
'2021-05-23',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 
'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RV'\n },\n {\n 'date': '2021-08-01',\n 'description': 'Bundesfeier',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 
'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Weihnachtstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.30265551805496216, "alphanum_fraction": 0.34921789169311523, "avg_line_length": 20.153846740722656, "blob_id": "fbaecfbf0fe1c4aa58392ca10b7a61976adc539f", "content_id": "1e849b0af93e077200299a2ea5436aade6615606", "detected_licenses": [ "MIT" ], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 2753, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-06',\n 'description': 'Nationaldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-06-23',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-06-24',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-11-04',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31389227509498596, "alphanum_fraction": 0.35925474762916565, "avg_line_length": 20.66666603088379, "blob_id": "f9e2850a53f3306683d5874fff2b4c999705fae5", "content_id": "9d3768e05721234cf0ebb96fa158970c9dd4aa0a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2469, "license_type": "permissive", "max_line_length": 49, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-16',\n 'description': 'Birthday of Martin Luther King, Jr.',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-02-20',\n 'description': \"Washington's Birthday\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 
'date': '2023-04-17',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2023-04-17',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Memorial Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-07-04',\n 'description': 'Independence Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-09-04',\n 'description': 'Labor Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Columbus Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Veterans Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-11-23',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-11-24',\n 'description': 'Day after Thanksgiving',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-12-24',\n 'description': 'Christmas Eve',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.329084575176239, "alphanum_fraction": 0.37504827976226807, "avg_line_length": 21.719297409057617, "blob_id": "e5c867c1e4c93606a5c3ba10b58f9cabe1dabdb6", "content_id": "90b8f2e0ae9e4e160577291088ab706a92aaf822", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2620, "license_type": "permissive", "max_line_length": 65, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-14',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-15',\n 'description': 'Ramazan Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-07-15',\n 'description': 'Demokrasi ve Milli Birlik Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-07-20',\n 'description': 'Kurban Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-07-21',\n 'description': 'Kurban Bayramı (2. 
Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-07-22',\n 'description': 'Kurban Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-07-23',\n 'description': 'Kurban Bayramı (4. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "9d64d43da4a214fbf731530beabf05d1ee684739", "content_id": "61c97d91b1a2d2a52c34a9eac2d61ddd1a808434", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-11',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-03-22',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-17',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-07',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-14',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-07-05',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-16',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-10-18',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-11-15',\n 'description': 'Independencia de 
Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30328208208084106, "alphanum_fraction": 0.35189032554626465, "avg_line_length": 20.305309295654297, "blob_id": "13475fbb5ba4bbd3c32de5273b2a7bc3812fcb75", "content_id": "00bda7e0dbc83eb837ed814368f676f6889163b8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4817, "license_type": "permissive", "max_line_length": 49, "num_lines": 226, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'RF'\n },\n {\n 'date': '2013-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '6',\n 'type': 'RF'\n },\n {\n 'date': '2013-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '7',\n 'type': 'RF'\n },\n {\n 'date': '2013-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '8',\n 'type': 'RF'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Karfreitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Ostersonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Ostermontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Staatsfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-04',\n 'description': 'Florian',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '4',\n 'type': 'F'\n },\n {\n 'date': '2013-05-09',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-20',\n 'description': 'Pfingstmontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-30',\n 'description': 'Fronleichnam',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-09-24',\n 'description': 'Rupert',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '5',\n 'type': 'F'\n },\n {\n 'date': '2013-10-10',\n 'description': 'Tag der Volksabstimmung',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'F'\n },\n {\n 'date': '2013-10-26',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-11-01',\n 
'description': 'Allerheiligen',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-11',\n 'description': 'Martin',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '1',\n 'type': 'F'\n },\n {\n 'date': '2013-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '9',\n 'type': 'F'\n },\n {\n 'date': '2013-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '3',\n 'type': 'F'\n },\n {\n 'date': '2013-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-24',\n 'description': 'Heiliger Abend',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Christtag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Stefanitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-31',\n 'description': 'Silvester',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "0df5c7c33480d191e4b83967155aab2439dc0f1b", "content_id": "442e317b634f379c7efe5f3d17269f852fbc4701", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': '2. 
pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-06-25',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-11-05',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "f86d0f54e86afa4a0747f71efe658b12548c7b38", "content_id": "23e61e1f218561b6870cca4d471ecda7829a21c1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-03-15',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-30',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-02',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-03',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-21',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 
'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30687829852104187, "alphanum_fraction": 0.35305434465408325, "avg_line_length": 20.224489212036133, "blob_id": "6a0aeb400d34d0db5f0a4c3badb655692056bff6", "content_id": "1142a0e0c6f497eeac8c96f03cdebabdab0f0faf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2086, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nouvel An',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Fête du Travail',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Ascension',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-21',\n 'description': 'Fête nationale',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-11',\n 'description': \"Jour de l'armistice\",\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Noël',\n 'locale': 'fr-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3191489279270172, "alphanum_fraction": 0.3645389974117279, "avg_line_length": 20.59183692932129, "blob_id": "54bafba2d57960abf8fd0974530a226256439bb0", "content_id": "f942d178b716287f6b272581af056c74951b7893", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2115, "license_type": "permissive", "max_line_length": 53, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Nieuwjaar',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Pasen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Paasmaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Dag van de arbeid',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': 
'',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Onze Lieve Heer hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Pinksteren',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Pinkstermaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-07-21',\n 'description': 'Nationale feestdag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-15',\n 'description': 'Onze Lieve Vrouw hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-11',\n 'description': 'Wapenstilstand',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Kerstmis',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31116390228271484, "alphanum_fraction": 0.3605700731277466, "avg_line_length": 20.489795684814453, "blob_id": "ff9d3223cd4085ba836eedc7f620d10324c646f4", "content_id": "f641091d42f8105790362ca9c60dd3bf575e337e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2128, "license_type": "permissive", "max_line_length": 54, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3218020796775818, "alphanum_fraction": 0.36685439944267273, "avg_line_length": 20.8157901763916, "blob_id": 
"34d3e0ea5c2554b558897536c77f1f3fa24ed088", "content_id": "5dbca408822045e457da1fb7411fa9f40f2ca05e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2493, "license_type": "permissive", "max_line_length": 56, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-02-08',\n 'description': 'Prešernov dan',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31993502378463745, "alphanum_fraction": 0.3654080331325531, "avg_line_length": 20.61403465270996, "blob_id": "2d83ab0e180ad7ecec5bf737747c93e9fc673140", "content_id": "12579e2a548fa819be8a4a72ab7deb84e2ba89cf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2463, "license_type": "permissive", "max_line_length": 44, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Nieuwjaarsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Goede Vrijdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Eerste Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Tweede Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 
'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-27',\n 'description': 'Koningsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-05-04',\n 'description': 'Dodenherdenking',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-05',\n 'description': 'Bevrijdingsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-25',\n 'description': 'Hemelvaartsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Eerste Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Tweede Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-12-05',\n 'description': 'Sinterklaas',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-15',\n 'description': 'Koninkrijksdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Eerste Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Tweede Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3381712734699249, "alphanum_fraction": 0.3817126154899597, "avg_line_length": 21.598360061645508, "blob_id": "af05c2bfe69ec2b74a06c5c41b34677e94e3506c", "content_id": "64fb3dc0902c6a6ce9169de1dbba8e3d36d75670", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2790, "license_type": "permissive", "max_line_length": 58, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Deň vzniku Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Zjavenie Pána / Traja králi',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Veľký piatok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Veľkonočný pondelok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Sviatok práce',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-08',\n 'description': 'Deň víťazstva nad fašizmom',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-07-05',\n 'description': 'Sviatok svätého Cyrila a Metoda',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-08-29',\n 'description': 'Výročie SNP',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-09-01',\n 'description': 'Deň Ústavy Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-09-15',\n 'description': 'Sedembolestná Panna Mária',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Sviatok všetkých svätých',\n 'locale': 
'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-17',\n 'description': 'Deň boja za slobodu a demokraciu',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Štedrý deň',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Prvý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Druhý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3346364200115204, "alphanum_fraction": 0.3784206509590149, "avg_line_length": 21.447368621826172, "blob_id": "558616688592b5888da78829b3d83a86433dd2f6", "content_id": "a93182edeb77e0f34993ee8c68191dbf3fe9d1e6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2569, "license_type": "permissive", "max_line_length": 108, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Nova Godina',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-06',\n 'description': 'Sveta tri kralja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Uskrs',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Uskršnji ponedjeljak',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Praznik rada',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-30',\n 'description': 'Dan državnosti',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-06-11',\n 'description': 'Tijelovo',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-22',\n 'description': 'Dan antifašističke borbe',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-05',\n 'description': 'Dan pobjede i domovinske zahvalnosti i Dan hrvatskih branitelja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-15',\n 'description': 'Velika Gospa',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Dan svih svetih',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-18',\n 'description': 'Dan sjećanja na žrtve Domovinskog rata i Dan sjećanja na žrtvu Vukovara i Škabrnje',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Božić',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Sveti Stjepan',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.624790608882904, "alphanum_fraction": 0.624790608882904, "avg_line_length": 27.428571701049805, "blob_id": "af6da3fdc926a1c7fd6917714d0c85a09ce6b906", "content_id": "48bb3dfdb736f27760b6cc524c290c062f45b217", "detected_licenses": [ "MIT" ], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 597, "license_type": "permissive", "max_line_length": 111, "num_lines": 21, "path": "/src/holidata/plugin.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "\"\"\"\nProvides base class for plugins.\n\"\"\"\n\n\nclass PluginMount(type):\n \"\"\"\n Metaclass that makes a given class plugin mount.\n All classes inheriting will be referenced in the 'plugins' attribute.\n \"\"\"\n\n def __init__(cls, name, bases, attrs):\n super(PluginMount, cls).__init__(name, bases, attrs)\n\n if not hasattr(cls, \"plugins\"):\n cls.plugins = []\n else:\n cls.plugins.append(cls)\n\n def get_plugin(cls, identifier, attribute):\n return next(iter([plugin for plugin in cls.plugins if getattr(plugin, attribute) == identifier]), None)\n" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "2bb73fb07d91b1c9be0c29c3b5d52427d076c9cc", "content_id": "61f9fdcbce3522a83be087c053f3ff88865405f2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-02-27',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-10',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-14',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 
'NRF'\n },\n {\n 'date': '2022-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31833910942077637, "alphanum_fraction": 0.36332181096076965, "avg_line_length": 20.820755004882812, "blob_id": "4fbc3cfb1d25c6d320e3448d6f65ebef83c4dabd", "content_id": "ff7affab607a1b3a4a0cd09fcc3522e110423af2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2312, "license_type": "permissive", "max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-10',\n 'description': \"National Women's Day (Supplement)\",\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "b1aaca2bdf9da28d0ac7831b5ad1a4716af66f5c", "content_id": "e153cd6daff8c43343ab6afc0df2c3ee5798f4e7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 
'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': '2. pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-26',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-11-06',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "407a62bdb07afdbbc079b4530278a3b4877d5817", "content_id": "a75d3165f47aac3c1a3c555354a544c263d7aee3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-12',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-03-23',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-02',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-18',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-08',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-15',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-29',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': 
'2015-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-17',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-10-12',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-11-02',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-11-16',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.6007416844367981, "alphanum_fraction": 0.6687268018722534, "avg_line_length": 28.962963104248047, "blob_id": "fb2c42221341daae2083f02cb8d7de647c5a1aa5", "content_id": "5089b1909e728ba16095b8631c3ec18c72a598ff", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 844, "license_type": "permissive", "max_line_length": 49, "num_lines": 27, "path": "/src/holidata/holidays/sk-SK.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass sk_SK(Locale):\n \"\"\"\n 01-01: [NF] Deň vzniku Slovenskej republiky\n 01-06: [NRF] Zjavenie Pána / Traja králi\n 05-01: [NF] Sviatok práce\n 05-08: [NF] Deň víťazstva nad fašizmom\n 07-05: [NRF] Sviatok svätého Cyrila a Metoda\n 08-29: [NF] Výročie SNP\n 09-01: [NF] Deň Ústavy Slovenskej republiky\n 09-15: [NRF] Sedembolestná Panna Mária\n 11-01: [NRF] Sviatok všetkých svätých\n 11-17: [NF] Deň boja za slobodu a demokraciu\n 12-24: [NRF] Štedrý deň\n 12-25: [NRF] Prvý sviatok vianočný\n 12-26: [NRF] Druhý sviatok vianočný\n 2 days before Easter: [NRV] Veľký piatok\n 1 day after Easter: [NRV] Veľkonočný pondelok\n \"\"\"\n\n locale = \"sk-SK\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.31302428245544434, "alphanum_fraction": 0.35894039273262024, "avg_line_length": 20.37735939025879, "blob_id": "9d3f7eedc83e13e2bd6f48d7a57d86be8745ded7", "content_id": "05514f03ace00b252d0aba1889703b5fb80e9e5e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2273, "license_type": "permissive", "max_line_length": 47, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-01-06',\n 'description': 'Trettondedagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Långfredagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Påskdagen',\n 'locale': 'sv-FI',\n 
'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Annandag påsk',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-25',\n 'description': 'Kristi himmelfärdsdag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Pingst',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-24',\n 'description': 'Midsommardagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-11-04',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-12-06',\n 'description': 'Självständighetsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "a0d8fe386b8e37563007ace9cb5da2f85c0801fc", "content_id": "0ca6146d3c06859543d075b8ff49c4774207d64f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/IS.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass IS(Country):\n id = \"IS\"\n languages = [\"is\"]\n default_lang = \"is\"\n" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "cbc8840311e0277f8ca56d4adaf5c0f1d13683d5", "content_id": "0ea39e5993862d45211e5d1bc3395c0e1061691e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-04-06',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-20',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-18',\n 'description': 
'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-07',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "7d834d94af7bc08c16a04f3a52d752968f9c5455", "content_id": "2dfedff7391445c117f445e98093f3fb6ce0b196", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3186638355255127, "alphanum_fraction": 
0.3643445670604706, "avg_line_length": 20.852941513061523, "blob_id": "50f6bfe1972483fa660917293d1a2713f91e41f9", "content_id": "547f365fa1d2fbe493ca840ccaa8bd4b127c5bcb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 24547, "license_type": "permissive", "max_line_length": 45, "num_lines": 1122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Neujahrstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'F'\n },\n {\n 'date': '2022-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'F'\n },\n {\n 'date': '2022-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'F'\n },\n {\n 'date': '2022-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'F'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2022-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2022-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2022-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2022-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2022-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AG',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'FR',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 
'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NE',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SO',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Ostersonntag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 
'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'F'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'F'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'F'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NE',\n 'type': 'F'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'F'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'F'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'F'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'F'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Auffahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': 
'2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RV'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RV'\n },\n {\n 'date': '2022-08-01',\n 'description': 'Bundesfeier',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': 
'2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Weihnachtstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n 
{\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.5332059264183044, "alphanum_fraction": 0.6691352128982544, "avg_line_length": 40.4455451965332, "blob_id": "b89a7367893d67ddb2197bb2e822caaa3d042285", "content_id": "920b7ac9e6b5d5e880b77fa4ce5d98ad22e1bc12", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4273, "license_type": "permissive", "max_line_length": 98, "num_lines": 101, "path": "/src/holidata/holidays/pt-BR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\n# Information gathered from Brazilian legislation.\n\n# National holidays:\n# Law 10.607/2002: http://www.planalto.gov.br/ccivil_03/leis/2002/l10607.htm\n# Law 6.802/1980: http://www.planalto.gov.br/ccivil_03/leis/l6802.htm\n\n# State holidays\n# AC: State laws 1.538/2004, 1.411/2001, 14/1964, 1.526/2004, and 57/1965\n# AL: State laws 5.508/1993, 5.509/1993, 5.724/1995, and Decree 68.782 (30/dec/2019)\n# AP: State law 667/2002 and Art. 355 of the Constitution of the Federative Republic of Brazil\n# AM: State laws 25/1977 and 84/2010\n# BA: Art. 6 of the Constitution of the Federative Republic of Brazil\n# CE: State laws 9.093/1995, 9.093/1995 and Art. 18 of the Constitution of the State of Ceará\n# DF: District law 963/1995\n# ES: State law 11.010/2019\n# GO: State law 20.756/2020\n# MA: State law 2.457/1964\n# MT: State law 7.879/2002\n# MS: State law 10/1979\n# MG: Art. 
256 of the Constitution of the State of Minas Gerais\n# PA: State law 5.999/1996\n# PB: State law 3.489/1967\n# PR: State law 4.658/1962\n# PE: State law 13.835/2009\n# PI: State law 176/1937\n# RJ: State laws 5.243/2008, 5.198/2008, and 4.007/2002\n# RN: State laws 7.831/2000 and 8.913/2006\n# RS: Art. 6 of the Constitution of the State of Rio Grande do Sul and Decree 36.180 (18/set/1995)\n# RO: State laws 2.291/2010 and 1.026/2001\n# RR: Art. 9 of the Constitution of the State of Roraima\n# SC: State laws 12.906/2004 and 10.306/1996\n# SP: State law 9.497/1997\n# SE: Art. 269 of the Constitution of the State of Sergipe\n# TO: State laws 98/1989, 960/1998, 627/1993\n\n\nclass pt_BR(Locale):\n \"\"\"\n 01-01: [NF] Confraternização Universal\n 01-04: [RO] [F] Criação do Estado de Rondônia\n 01-23: [AC] [RF] Dia do Evangélico no Acre\n 03-06: [PE] [F] Revolução Pernambucana de 1817\n 03-08: [AC] [F] Dia Internacional da Mulher\n 03-18: [TO] [F] Autonomia do Estado de Tocantins\n 03-19: [AP, CE] [RF] Dia de São José\n 03-25: [CE] [F] Abolição da Escravidão no Ceará\n 04-21: [DF] [F] Fundação de Brasília\n 04-21: [MG] [F] Execução de Tiradentes\n 04-21: [NF] Tiradentes\n 04-23: [RJ] [RF] Dia de São Jorge\n 05-01: [NF] Dia Internacional do Trabalhador\n 06-15: [AC] [F] Aniversário do Estado do Acre\n 06-18: [RO] [RF] Dia do Evangélico em Rondônia\n 06-24: [AL, PE] [RF] São João\n 06-29: [AL] [RF] São Pedro\n 07-02: [BA] [F] Independência da Bahia\n 07-08: [SE] [F] Emancipação Política de Sergipe\n 07-09: [SP] [F] Revolução Constitucionalista de 1932\n 07-26: [GO] [F] Fundação da Cidade de Goiás\n 07-28: [MA] [F] Adesão do Maranhão à Independência do Brasil\n 08-05: [PB] [F] Fundação do Estado da Paraíba\n 08-07: [RN] [F] Dia do Rio Grande do Norte\n 08-11: [SC] [F] Dia de Santa Catarina\n 08-15: [CE] [RF] Dia de Nossa Senhora da Assunção\n 08-15: [PA] [F] Adesão do Pará à Independência do Brasil\n 09-05: [AC] [F] Dia da Amazônia\n 09-05: [AM] [F] Elevação do Amazonas à Categoria de Província\n 09-07: [NF] Independência do Brasil\n 09-08: [TO] [F] Nossa Senhora da Natividade\n 09-13: [AP] [F] Criação do Território Federal do Amapá\n 09-16: [AL] [F] Emancipação Política do Alagoas\n 09-20: [RS] [F] Dia do Gaúcho\n 10-03: [RN] [F] Mártires de Cunhaú e Uruaçu\n 10-05: [RR] [F] Criação dos Estado de Roraima\n 10-05: [TO] [F] Criação dos Estado de Tocantins\n 10-11: [MS] [F] Criação do Estado do Mato Grosso do Sul\n 10-12: [NRF] Nossa Senhora Aparecida\n 10-19: [PI] [F] Dia do Piauí\n 10-24: [GO] [F] Pedra Fundamental de Goiânia\n 11-02: [NRF] Finados\n 11-15: [NF] Proclamação da República\n 11-17: [AC] [F] Assinatura do Tratado de Petrópolis\n 11-20: [AL] [F] Morte de Zumbi dos Palmares\n 11-20: [AM, MT, RJ] [F] Dia da Consciência Negra\n 11-25: [SC] [RF] Dia de Santa Catarina de Alexandria\n 11-30: [DF] [RF] Dia do Evangélico do Distrito Federal\n 12-08: [AM] [RF] Nossa Senhora da Conceição\n 12-19: [PR] [F] Emancipação Política do Estado do Paraná\n 12-25: [NRF] Natal\n 47 days before Easter: [NRV] Carnaval\n Easter: [NRV] Páscoa\n \"\"\"\n\n locale = \"pt-BR\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "7fbdd71f9faa2fd2bafd1181e315434099a7ec76", "content_id": "eef4df831eb7774596c45ff7d8d7fd4b1402dafd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, 
"num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Nelipühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3250230848789215, "alphanum_fraction": 0.36934441328048706, "avg_line_length": 21.112245559692383, "blob_id": "12b2b0868dfc9a5c5fff755679521ab5b4344a8f", "content_id": "b1fa9f8b2d496becce2dd8ae69e8bffab954dc2e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2166, "license_type": "permissive", "max_line_length": 61, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-03',\n 'description': \"New Year's Day (observed)\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Good Friday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-02',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-06-02',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-06-03',\n 'description': \"Queen's Platinum Jubilee\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-08-29',\n 'description': 'August Bank Holiday',\n 'locale': 
'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-09-19',\n 'description': 'State Funeral of Queen Elizabeth II',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-27',\n 'description': 'Christmas Day (observed)',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.3136601746082306, "alphanum_fraction": 0.3593526780605316, "avg_line_length": 20.4489803314209, "blob_id": "ecb309f53104253c98b26fb9526b89f3db386b2d", "content_id": "304ed135e8e66d35aeea869740c6139849817b3a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2101, "license_type": "permissive", "max_line_length": 49, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Capodanno',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-06',\n 'description': 'Epifania',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Pasqua',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Pasquetta',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-25',\n 'description': 'Festa della liberazione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Festa del lavoro',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-06-02',\n 'description': 'Festa della repubblica',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-15',\n 'description': 'Assunzione (ferragosto)',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Ognissanti',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-08',\n 'description': 'Immacolata concezione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Natale',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'S.to Stefano',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "45ee59581c4e3ac61fc1d3ececfccd4989557ae7", "content_id": "c35ce09f6f92ac8fee005c60e99e23a86b7ca789", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 
'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-02-19',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-04-06',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-07',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-08',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-09',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-28',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.329084575176239, "alphanum_fraction": 0.37504827976226807, "avg_line_length": 21.719297409057617, "blob_id": "e00780dda0b3f112c4537b191670727ced28b040", "content_id": "c0bcc11613859c5dfdad5eb29ac878342c60297e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2620, "license_type": "permissive", "max_line_length": 65, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-25',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-26',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-27',\n 'description': 'Ramazan Bayramı (3. 
Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-07-15',\n 'description': 'Demokrasi ve Milli Birlik Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-09-01',\n 'description': 'Kurban Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-09-02',\n 'description': 'Kurban Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-09-03',\n 'description': 'Kurban Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-09-04',\n 'description': 'Kurban Bayramı (4. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30265551805496216, "alphanum_fraction": 0.34921789169311523, "avg_line_length": 20.153846740722656, "blob_id": "1b1408eae6f1dcb92fd12d53f54e2317a7075c6c", "content_id": "758e59f5e43353c6ac779a444a0a005f4b077d2b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2753, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-25',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-06',\n 'description': 'Nationaldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-23',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-06-24',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-11-04',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': 
'',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "f96fb630b33b5f0cc6652d079fa374cba0f74e94", "content_id": "2f4742c38fbbd6ac7b1f95314bf51267e9222fc6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-21',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-20',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-02',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-13',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3322019577026367, "alphanum_fraction": 0.3763258457183838, "avg_line_length": 21.245283126831055, "blob_id": "811666baeb231e4b240f09e4b1e3d20ad05af185", "content_id": "3bdd5fcf9075bf03032ca2c0d5f953b74b68365e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2378, "license_type": "permissive", "max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Nowy Rok',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-06',\n 'description': 'Trzech Króli',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Wielkanoc',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n 
},\n {\n 'date': '2019-04-22',\n 'description': 'Poniedziałek Wielkanocny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Święto Pracy',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-03',\n 'description': 'Święto Konstytucji Trzeciego Maja',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Zielone Świątki',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Boże Ciało',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Wniebowzięcie Najświętszej Maryi Panny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Wszystkich Świętych',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-11-11',\n 'description': 'Narodowe Święto Niepodległości',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Boże Narodzenie (pierwszy dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Boże Narodzenie (drugi dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3224353492259979, "alphanum_fraction": 0.3677709996700287, "avg_line_length": 20.920381546020508, "blob_id": "78bcd79b62bcd1710fe859371ef313cecf270b86", "content_id": "7aa12628296c58f08f9c2c9931827d149b818ada", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6886, "license_type": "permissive", "max_line_length": 51, "num_lines": 314, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BW',\n 'type': 'RF'\n },\n {\n 'date': '2023-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BY',\n 'type': 'RF'\n },\n {\n 'date': '2023-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'ST',\n 'type': 'RF'\n },\n {\n 'date': '2023-03-08',\n 'description': 'Internationaler Frauentag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BE',\n 'type': 'F'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Karfreitag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Ostern',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Ostermontag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Erster Maifeiertag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-DE',\n 
'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Pfingstmontag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-08',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BW',\n 'type': 'RV'\n },\n {\n 'date': '2023-06-08',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BY',\n 'type': 'RV'\n },\n {\n 'date': '2023-06-08',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'HE',\n 'type': 'RV'\n },\n {\n 'date': '2023-06-08',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2023-06-08',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'RP',\n 'type': 'RV'\n },\n {\n 'date': '2023-06-08',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SL',\n 'type': 'RV'\n },\n {\n 'date': '2023-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SL',\n 'type': 'RF'\n },\n {\n 'date': '2023-10-03',\n 'description': 'Tag der Deutschen Einheit',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BB',\n 'type': 'RF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BH',\n 'type': 'RF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'HH',\n 'type': 'RF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'MV',\n 'type': 'RF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'NI',\n 'type': 'RF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SN',\n 'type': 'RF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'ST',\n 'type': 'RF'\n },\n {\n 'date': '2023-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'TH',\n 'type': 'RF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BW',\n 'type': 'RF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BY',\n 'type': 'RF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'RP',\n 'type': 'RF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SL',\n 'type': 'RF'\n },\n {\n 'date': '2023-11-22',\n 'description': 'Buß- und Bettag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SN',\n 'type': 'RV'\n },\n {\n 'date': '2023-12-24',\n 'description': 'Heilig Abend',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Weihnachtstag',\n 'locale': 
'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Zweiter Weihnachtstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-31',\n 'description': 'Silvester',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "c52a4f5f62df9f822fd153a4cb8c027dd137b252", "content_id": "b29ec89b348dc3b81851fa52beeeb3df500fcb01", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30328208208084106, "alphanum_fraction": 0.35189032554626465, "avg_line_length": 20.305309295654297, "blob_id": "6792e4719b01bbfd0f896a83fe1c9536254e9195", "content_id": "03291369ea1e0089bd0e12d9aa8e28e3805db699", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4817, "license_type": "permissive", "max_line_length": 49, "num_lines": 226, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': 
'',\n 'region': '2',\n 'type': 'RF'\n },\n {\n 'date': '2011-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '6',\n 'type': 'RF'\n },\n {\n 'date': '2011-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '7',\n 'type': 'RF'\n },\n {\n 'date': '2011-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '8',\n 'type': 'RF'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Karfreitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Ostersonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Ostermontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Staatsfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-04',\n 'description': 'Florian',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '4',\n 'type': 'F'\n },\n {\n 'date': '2011-06-02',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-13',\n 'description': 'Pfingstmontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-23',\n 'description': 'Fronleichnam',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-09-24',\n 'description': 'Rupert',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '5',\n 'type': 'F'\n },\n {\n 'date': '2011-10-10',\n 'description': 'Tag der Volksabstimmung',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'F'\n },\n {\n 'date': '2011-10-26',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-11',\n 'description': 'Martin',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '1',\n 'type': 'F'\n },\n {\n 'date': '2011-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '9',\n 'type': 'F'\n },\n {\n 'date': '2011-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '3',\n 'type': 'F'\n },\n {\n 'date': '2011-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-24',\n 'description': 'Heiliger Abend',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Christtag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Stefanitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-31',\n 'description': 'Silvester',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "6faa74ba7e8e38b1836a38314cb4ec3cba435e4f", 
"content_id": "43a951392bb369f387758fcc233293aa9e64ab8d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-03-02',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-13',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31389227509498596, "alphanum_fraction": 0.35925474762916565, "avg_line_length": 20.66666603088379, "blob_id": "2eaf09e8c367199a4eaf14457911107b8f8c04ed", "content_id": "16e10a2d3e436b739ef807c899e648c10aab98aa", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2469, "license_type": "permissive", "max_line_length": 49, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-15',\n 'description': 'Birthday of Martin Luther King, 
Jr.',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-02-19',\n 'description': \"Washington's Birthday\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-04-16',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2018-04-16',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2018-05-28',\n 'description': 'Memorial Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-07-04',\n 'description': 'Independence Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-09-03',\n 'description': 'Labor Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-10-08',\n 'description': 'Columbus Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-11-11',\n 'description': 'Veterans Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-11-22',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-11-23',\n 'description': 'Day after Thanksgiving',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-12-24',\n 'description': 'Christmas Eve',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30265551805496216, "alphanum_fraction": 0.34921789169311523, "avg_line_length": 20.153846740722656, "blob_id": "ce889e6cd7b7cbef8263dbac23b42ed6114dd5d8", "content_id": "23e29a2e8470f0c9415f32f6033ff888b7e14ab6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2753, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-09',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-06',\n 'description': 'Nationaldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n 
{\n 'date': '2013-06-21',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-06-22',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-11-02',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.6136363744735718, "alphanum_fraction": 0.6136363744735718, "avg_line_length": 13.666666984558105, "blob_id": "acd8c28c995a9e0986eae2e1e342b0fb6e17de1d", "content_id": "09d9f2c704e0c345b2c6b795184719b45807564b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 88, "license_type": "permissive", "max_line_length": 29, "num_lines": 6, "path": "/src/holidata/holidays/ZA.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass ZA(Country):\n id = \"ZA\"\n languages = [\"en\"]\n" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "cfdc8b1f58286ee58f6d31cbc0c1986169c560b6", "content_id": "820143f430e5f31a1c5d26b7ae9d692626cbe1e3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-08',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-03-19',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-03-29',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-14',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-06-04',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-06-11',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': 
'',\n 'type': 'NRV'\n },\n {\n 'date': '2018-07-02',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-20',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-10-15',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-11-05',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-11-12',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.43023255467414856, "alphanum_fraction": 0.48255813121795654, "avg_line_length": 20.5, "blob_id": "260cded4531fbb75e71f7d15429ce54a89a70afc", "content_id": "5892fe67cd92e101e0e946050c1e20bfab5cb2db", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 172, "license_type": "permissive", "max_line_length": 59, "num_lines": 8, "path": "/src/holidata/holidays/AT.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass AT(Country):\n id = \"AT\"\n languages = [\"de\"]\n default_lang = \"de\"\n regions = [\"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\"]\n" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "dff8942c35483fcda763c11f87cbe8699fd2fc43", "content_id": "94077c7188328db917bcc55dc77f510abdb9898c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-02-23',\n 'description': 'День защитника Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 
'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3047436475753784, "alphanum_fraction": 0.3545759320259094, "avg_line_length": 20.306121826171875, "blob_id": "d76166381b41a88961afe1bc352fc17a2d924bee", "content_id": "be7f033b78c4bd46afb9ede3751fd172fa07ed2a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2093, "license_type": "permissive", "max_line_length": 45, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': \"Jour de l'an\",\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Fête du premier mai',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-08',\n 'description': 'Armistice 1945',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Ascension',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Lundi de Pentecôte',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-07-14',\n 'description': 'Fête nationale',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Assomption',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Toussaint',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Armistice 1918',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Noël',\n 'locale': 'fr-FR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.725813090801239, "alphanum_fraction": 0.7794815301895142, "avg_line_length": 91.33650970458984, "blob_id": "a98aa117566f0e1ecfc09e5050ee1553adfe0620", "content_id": "742eaf4b7d074e707c045985907f587ed7aa63b5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 87258, "license_type": "permissive", "max_line_length": 184, "num_lines": 945, "path": "/tests/snapshots/snap_test_holidata.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# snapshottest: v1 - https://goo.gl/zC4yUc\nfrom __future__ import unicode_literals\n\nfrom snapshottest import Snapshot\nfrom snapshottest.file import FileSnapshot\n\n\nsnapshots = Snapshot()\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2011] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2017] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[da_DK-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2022] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_AT-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_BE-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2015] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_CH-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2020] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[de_DE-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[el_GR-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2013] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_CA-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_CA-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2018] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_GB-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_NZ-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2011] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_US-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2016] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[en_ZA-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2022] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_CO-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_ES-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2014] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[es_US-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2020] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[et_EE-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fi_FI-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2012] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_BE-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_BE-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2018] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_CA-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[fr_FR-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_FR-2023] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hr_HR-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2016] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[hu_HU-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2021] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[is_IS-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[it_IT-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2014] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nb_NO-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2019] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_BE-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[nl_NL-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2012] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pl_PL-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2017] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_BR-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[pt_PT-2023] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[ru_RU-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2015] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sk_SK-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2021] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sl_SI-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_FI-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2013] 
1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2019] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[sv_SE-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2023] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2011] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2011] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2012] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2012] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2013] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2013] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2014] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2014] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2015] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2015] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2016] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2016] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2017] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2017] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2018] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2018] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2019] 1'] = 
FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2019] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2020] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2020] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2021] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2021] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2022] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2022] 1.py')\n\nsnapshots['test_holidata_produces_holidays_for_locale_and_year[tr_TR-2023] 1'] = FileSnapshot('snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2023] 1.py')\n" }, { "alpha_fraction": 0.329084575176239, "alphanum_fraction": 0.37504827976226807, "avg_line_length": 21.719297409057617, "blob_id": "a33936e24f62a055ca6986d0f4af1aaca2d0d758", "content_id": "fafe5a72f88d9fdac29da8fb08a736ac7bbc0ba8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2620, "license_type": "permissive", "max_line_length": 65, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-06-15',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-06-16',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-06-17',\n 'description': 'Ramazan Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-07-15',\n 'description': 'Demokrasi ve Milli Birlik Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-21',\n 'description': 'Kurban Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-08-22',\n 'description': 'Kurban Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-08-23',\n 'description': 'Kurban Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-08-24',\n 'description': 'Kurban Bayramı (4. 
Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.7599999904632568, "avg_line_length": 24, "blob_id": "772bc2086c348c17f7a9b04ebcd239b5851c8599", "content_id": "fafd3da94e2dcec471e29029920f9e5cfd0fee3f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 25, "license_type": "permissive", "max_line_length": 24, "num_lines": 1, "path": "/tests/__init__.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "HOLIDATA_YEAR_MAX = 2024\n" }, { "alpha_fraction": 0.5577889680862427, "alphanum_fraction": 0.6381909251213074, "avg_line_length": 23.875, "blob_id": "a7d1691eac2ef7a021f657726ec86fd7b8fb4762", "content_id": "90a161ee02a691af11aa101385edd74098692158", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 603, "license_type": "permissive", "max_line_length": 50, "num_lines": 24, "path": "/src/holidata/holidays/fr-FR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass fr_FR(Locale):\n \"\"\"\n 01-01: [NF] Jour de l'an\n 05-01: [NF] Fête du premier mai\n 05-08: [NF] Armistice 1945\n 07-14: [NF] Fête nationale\n 08-15: [NRF] Assomption\n 11-01: [NRF] Toussaint\n 11-11: [NF] Armistice 1918\n 12-25: [NF] Noël\n 1 day after Easter: [NRV] Lundi de Pâques\n 39 days after Easter: [NRV] Ascension\n 49 days after Easter: [NRV] Pentecôte\n 50 days after Easter: [NRV] Lundi de Pentecôte\n \"\"\"\n\n locale = \"fr-FR\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "0e004934163362f2f44043946784d815ad2c4f9c", "content_id": "086195f7340c0319a144238fe21119f26281fcae", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 
'type': 'NRV'\n },\n {\n 'date': '2012-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.33733752369880676, "alphanum_fraction": 0.38070598244667053, "avg_line_length": 21.994335174560547, "blob_id": "c6a364af3f40356701ab049c8ce664252eefb0b3", "content_id": "87b4b988f3dcc5858f4190b9862e0c0ce7b1d071", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16304, "license_type": "permissive", "max_line_length": 68, "num_lines": 706, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-06',\n 'description': 'Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RF'\n },\n {\n 
'date': '2013-01-07',\n 'description': 'Lunes siguiente a la Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RF'\n },\n {\n 'date': '2013-02-28',\n 'description': 'Día de Andalucía',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2013-03-01',\n 'description': 'Día de las Illes Balears',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'F'\n },\n {\n 'date': '2013-03-18',\n 'description': 'Lunes de Fallas',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'F'\n },\n {\n 'date': '2013-03-18',\n 'description': 'Traslado de San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2013-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2013-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RF'\n },\n {\n 'date': '2013-03-19',\n 'description': 'San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RF'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Viernes Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 
'date': '2013-03-31',\n 'description': 'Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2013-04-23',\n 'description': 'Fiesta de Castilla y León',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2013-04-23',\n 'description': 'San Jorge / Día de Aragón',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-02',\n 'description': 'Fiesta de la Comunidad de Madrid',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2013-05-17',\n 'description': 'Día de las Letras Gallegas',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'F'\n },\n {\n 'date': '2013-05-30',\n 'description': 'Corpus Christi',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2013-05-30',\n 'description': 'Día de Canarias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'F'\n },\n {\n 'date': '2013-05-31',\n 'description': 'Día de Castilla-La Mancha',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'F'\n },\n {\n 'date': '2013-06-10',\n 'description': 'Día de La Rioja',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'F'\n },\n {\n 'date': '2013-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n },\n {\n 'date': '2013-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2013-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2013-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RF'\n },\n {\n 'date': '2013-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RF'\n },\n {\n 'date': '2013-07-25',\n 'description': 'Santiago Apóstol / Día Nacional de Galicia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2013-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-09-08',\n 'description': 'Día de Asturias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2013-09-08',\n 'description': 
'Día de Extremadura',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2013-09-09',\n 'description': 'Lunes siguiente al Día de Asturias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2013-09-09',\n 'description': 'Lunes siguiente al Día de Extremadura',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2013-09-11',\n 'description': 'Fiesta Nacional de Cataluña',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'F'\n },\n {\n 'date': '2013-10-09',\n 'description': 'Día de la Comunitat Valenciana',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'F'\n },\n {\n 'date': '2013-10-12',\n 'description': 'Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-10-15',\n 'description': 'Fiesta del Sacrificio (Aid El Kebir)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2013-10-15',\n 'description': 'Fiesta del Sacrificio (Eidul Adha)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2013-10-25',\n 'description': 'Día del País Vasco-Euskadiko Eguna',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'F'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-06',\n 'description': 'Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-09',\n 'description': 'Lunes siguiente a La Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RF'\n },\n {\n 'date': '2013-12-09',\n 'description': 'Lunes siguiente a La Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2013-12-09',\n 'description': 'Lunes siguiente a La Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RF'\n },\n {\n 'date': '2013-12-09',\n 'description': 'Lunes siguiente a La Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RF'\n },\n {\n 'date': '2013-12-09',\n 'description': 'Lunes siguiente a La Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RF'\n },\n {\n 'date': '2013-12-09',\n 'description': 'Lunes siguiente a La Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RF'\n },\n {\n 'date': '2013-12-09',\n 'description': 'Lunes siguiente a La Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RF'\n },\n {\n 'date': '2013-12-09',\n 'description': 'Lunes siguiente a La Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Natividad del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.5466269850730896, 
"alphanum_fraction": 0.6349206566810608, "avg_line_length": 27, "blob_id": "ad3618c51b80e13260e56e3f390c4634c4ffa5d7", "content_id": "9fc4f8bf452883a6c5ea957ec4fedde06e9b728a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1012, "license_type": "permissive", "max_line_length": 51, "num_lines": 36, "path": "/src/holidata/holidays/de-AT.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass de_AT(Locale):\n \"\"\"\n 01-01: [NF] Neujahr\n 01-06: [NRF] Heilige drei Könige\n 03-19: [2,6,7,8] [RF] Josef\n 05-01: [NF] Staatsfeiertag\n 05-04: [4] [F] Florian\n 08-15: [NRF] Mariä Himmelfahrt\n 09-24: [5] [F] Rupert\n 10-10: [2] [F] Tag der Volksabstimmung\n 10-26: [NF] Nationalfeiertag\n 11-01: [NRF] Allerheiligen\n 11-11: [1] [F] Martin\n 11-15: [9,3] [F] Leopold\n 12-08: [NRF] Mariä Empfängnis\n 12-24: [NRF] Heiliger Abend\n 12-25: [NRF] Christtag\n 12-26: [NF] Stefanitag\n 12-31: [NF] Silvester\n 2 days before Easter: [NRV] Karfreitag\n Easter: [NRV] Ostersonntag\n 1 day after Easter: [NRV] Ostermontag\n 39 days after Easter: [NRV] Christi Himmelfahrt\n 49 days after Easter: [NRV] Pfingstsonntag\n 50 days after Easter: [NRV] Pfingstmontag\n 60 days after Easter: [NRV] Fronleichnam\n \"\"\"\n\n locale = \"de-AT\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.3280318081378937, "alphanum_fraction": 0.3720032870769501, "avg_line_length": 21.624338150024414, "blob_id": "48963a94ab3e7dda33c4cd6deab72ffd99110eed", "content_id": "63aa9779ba8c963388dc96122ff5923f917b9aff", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8586, "license_type": "permissive", "max_line_length": 57, "num_lines": 378, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fr_CA-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': \"Jour de l'An\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-02-16',\n 'description': 'Fête de la Famille',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2015-02-16',\n 'description': 'Fête de la Famille',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'ON',\n 'type': 'V'\n },\n {\n 'date': '2015-02-16',\n 'description': 'Fête de la Famille',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2015-02-16',\n 'description': 'Fête de la Famille',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NB',\n 'type': 'V'\n },\n {\n 'date': '2015-02-16',\n 'description': 'Journée Louis Riel',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'MB',\n 'type': 'V'\n },\n {\n 'date': '2015-02-16',\n 'description': 'Fête des Insulaires',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'PE',\n 'type': 'V'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Vendredi Saint',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'RV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Lundi de Pâques',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'PE',\n 'type': 'RV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Lundi de Pâques',\n 'locale': 
'fr-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'RV'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'BC',\n 'type': 'V'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'MB',\n 'type': 'V'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NS',\n 'type': 'V'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'ON',\n 'type': 'V'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'V'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NU',\n 'type': 'V'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Fête de la Reine Victoria',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'YT',\n 'type': 'V'\n },\n {\n 'date': '2015-05-18',\n 'description': 'Journée Nationale des Patriotes',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'V'\n },\n {\n 'date': '2015-06-24',\n 'description': 'Fête Nationale',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'F'\n },\n {\n 'date': '2015-07-01',\n 'description': 'Fête du Canada',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-03',\n 'description': \"Premier lundi d'août\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'V'\n },\n {\n 'date': '2015-08-03',\n 'description': \"Premier lundi d'août\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NU',\n 'type': 'V'\n },\n {\n 'date': '2015-08-03',\n 'description': 'Fête du Patrimoine',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2015-08-03',\n 'description': 'Fête de la Saskatchewan',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2015-08-03',\n 'description': 'Jour de la Fondation',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NS',\n 'type': 'V'\n },\n {\n 'date': '2015-08-03',\n 'description': 'Jour du Nouveau-Brunswick',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NB',\n 'type': 'V'\n },\n {\n 'date': '2015-09-07',\n 'description': 'Fête du Travail',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'V'\n },\n {\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'BC',\n 'type': 'V'\n },\n {\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'MB',\n 'type': 'V'\n },\n {\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NL',\n 'type': 'V'\n },\n {\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'ON',\n 'type': 'V'\n },\n 
{\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'QC',\n 'type': 'V'\n },\n {\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'SK',\n 'type': 'V'\n },\n {\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'V'\n },\n {\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NU',\n 'type': 'V'\n },\n {\n 'date': '2015-10-12',\n 'description': \"Jour de l'Action de grâce\",\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'YT',\n 'type': 'V'\n },\n {\n 'date': '2015-11-11',\n 'description': 'Jour du Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'AB',\n 'type': 'F'\n },\n {\n 'date': '2015-11-11',\n 'description': 'Jour du Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'BC',\n 'type': 'F'\n },\n {\n 'date': '2015-11-11',\n 'description': 'Jour du Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NB',\n 'type': 'F'\n },\n {\n 'date': '2015-11-11',\n 'description': 'Jour du Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NL',\n 'type': 'F'\n },\n {\n 'date': '2015-11-11',\n 'description': 'Jour du Souvenir',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': 'NT',\n 'type': 'F'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Jour de Noël',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Lendemain de Noël',\n 'locale': 'fr-CA',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3306382894515991, "alphanum_fraction": 0.3748936057090759, "avg_line_length": 21.179244995117188, "blob_id": "d7779868e4b52ec80d8605990555ac0f3de3815f", "content_id": "2513704a1edc3e3e86caa40d09c4bc5f7c2635c8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2370, "license_type": "permissive", "max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Nowy Rok',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-06',\n 'description': 'Trzech Króli',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Wielkanoc',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Poniedziałek Wielkanocny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Święto Pracy',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-03',\n 'description': 'Święto Konstytucji Trzeciego Maja',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Zielone Świątki',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Boże Ciało',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Wniebowzięcie Najświętszej Maryi Panny',\n 'locale': 'pl-PL',\n 'notes': '',\n 
'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Wszystkich Świętych',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-11-11',\n 'description': 'Narodowe Święto Niepodległości',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Boże Narodzenie (pierwszy dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Boże Narodzenie (drugi dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "6256a1a25bf60401131b0eaaeec9de8ebf150218", "content_id": "df2dc3fc5747fe10c1f243d4629553606f52072a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-04-06',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': '2. 
pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-06-23',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-11-03',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "eeb4235678d7f3cd54b4f90c32993ccf7045472f", "content_id": "daecd5f3baf830862aab44a028971993b3fef492", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-03-04',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Sexta-feira Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': 
'',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Natal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "10b96d35b9df0b88e4f41ccd6bd3f7146f0d7cc0", "content_id": "4457f3d082260f4816b526ba6a1312c06020046b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-02-27',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-15',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3322019577026367, "alphanum_fraction": 0.3763258457183838, "avg_line_length": 21.245283126831055, "blob_id": "89be3702b955e0f019657fb846f32008e9d7885a", "content_id": "ad97b8d347779ef2ec0ea2439354961f22d2039d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2378, "license_type": "permissive", "max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Nowy Rok',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-06',\n 'description': 'Trzech 
Króli',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Wielkanoc',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Poniedziałek Wielkanocny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Święto Pracy',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-03',\n 'description': 'Święto Konstytucji Trzeciego Maja',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Zielone Świątki',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-30',\n 'description': 'Boże Ciało',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-08-15',\n 'description': 'Wniebowzięcie Najświętszej Maryi Panny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Wszystkich Świętych',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-11',\n 'description': 'Narodowe Święto Niepodległości',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Boże Narodzenie (pierwszy dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Boże Narodzenie (drugi dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "df0dd8528df8c0bbd809c697dd12bd634f5732cf", "content_id": "b90c73535b22bcda35d5b4ae1c65e12673e9f5a3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-02-23',\n 'description': 'День защитника Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-28',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { 
"alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "822aa370e5725808025228c63915784e6e331f9a", "content_id": "8dfa09b7d0444f7c44af067b9476cddc97192dff", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-02',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3079564869403839, "alphanum_fraction": 0.35374927520751953, "avg_line_length": 20.317073822021484, "blob_id": "1d4f65fd2c7ffd0063828bcd419e1a029b4c7111", "content_id": "550420b9e8fee942423c63cd7f6e7fd44b4ae3da", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1747, "license_type": "permissive", "max_line_length": 50, "num_lines": 82, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-02',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': 
'2019-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-03',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-10-28',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.34658291935920715, "alphanum_fraction": 0.3900815546512604, "avg_line_length": 22.29257583618164, "blob_id": "043dcd2914155bd1ea62cdf52c33434b32c7434a", "content_id": "8153b9b4068d2e7d92047208579eff9aeca76bbb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10759, "license_type": "permissive", "max_line_length": 71, "num_lines": 458, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_BR-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Confraternização Universal',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-04',\n 'description': 'Criação do Estado de Rondônia',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'RO',\n 'type': 'F'\n },\n {\n 'date': '2011-01-23',\n 'description': 'Dia do Evangélico no Acre',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AC',\n 'type': 'RF'\n },\n {\n 'date': '2011-03-06',\n 'description': 'Revolução Pernambucana de 1817',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'PE',\n 'type': 'F'\n },\n {\n 'date': '2011-03-08',\n 'description': 'Dia Internacional da Mulher',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AC',\n 'type': 'F'\n },\n {\n 'date': '2011-03-08',\n 'description': 'Carnaval',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-03-18',\n 'description': 'Autonomia do Estado de Tocantins',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'TO',\n 'type': 'F'\n },\n {\n 'date': '2011-03-19',\n 'description': 'Dia de São José',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AP',\n 'type': 'RF'\n },\n {\n 'date': '2011-03-19',\n 'description': 'Dia de São José',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RF'\n },\n {\n 'date': '2011-03-25',\n 'description': 'Abolição da Escravidão no Ceará',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'CE',\n 'type': 'F'\n },\n {\n 'date': '2011-04-21',\n 'description': 'Fundação de Brasília',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'DF',\n 'type': 'F'\n },\n {\n 'date': '2011-04-21',\n 'description': 'Execução de Tiradentes',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'MG',\n 'type': 'F'\n },\n {\n 'date': '2011-04-21',\n 'description': 'Tiradentes',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-04-23',\n 'description': 'Dia de São Jorge',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'RJ',\n 'type': 'RF'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Páscoa',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Dia Internacional do Trabalhador',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 
'',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-15',\n 'description': 'Aniversário do Estado do Acre',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AC',\n 'type': 'F'\n },\n {\n 'date': '2011-06-18',\n 'description': 'Dia do Evangélico em Rondônia',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'RO',\n 'type': 'RF'\n },\n {\n 'date': '2011-06-24',\n 'description': 'São João',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AL',\n 'type': 'RF'\n },\n {\n 'date': '2011-06-24',\n 'description': 'São João',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'PE',\n 'type': 'RF'\n },\n {\n 'date': '2011-06-29',\n 'description': 'São Pedro',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AL',\n 'type': 'RF'\n },\n {\n 'date': '2011-07-02',\n 'description': 'Independência da Bahia',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'BA',\n 'type': 'F'\n },\n {\n 'date': '2011-07-08',\n 'description': 'Emancipação Política de Sergipe',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'SE',\n 'type': 'F'\n },\n {\n 'date': '2011-07-09',\n 'description': 'Revolução Constitucionalista de 1932',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'SP',\n 'type': 'F'\n },\n {\n 'date': '2011-07-26',\n 'description': 'Fundação da Cidade de Goiás',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'GO',\n 'type': 'F'\n },\n {\n 'date': '2011-07-28',\n 'description': 'Adesão do Maranhão à Independência do Brasil',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'MA',\n 'type': 'F'\n },\n {\n 'date': '2011-08-05',\n 'description': 'Fundação do Estado da Paraíba',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'PB',\n 'type': 'F'\n },\n {\n 'date': '2011-08-07',\n 'description': 'Dia do Rio Grande do Norte',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'RN',\n 'type': 'F'\n },\n {\n 'date': '2011-08-11',\n 'description': 'Dia de Santa Catarina',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'SC',\n 'type': 'F'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Dia de Nossa Senhora da Assunção',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RF'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Adesão do Pará à Independência do Brasil',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'PA',\n 'type': 'F'\n },\n {\n 'date': '2011-09-05',\n 'description': 'Dia da Amazônia',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AC',\n 'type': 'F'\n },\n {\n 'date': '2011-09-05',\n 'description': 'Elevação do Amazonas à Categoria de Província',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AM',\n 'type': 'F'\n },\n {\n 'date': '2011-09-07',\n 'description': 'Independência do Brasil',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-09-08',\n 'description': 'Nossa Senhora da Natividade',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'TO',\n 'type': 'F'\n },\n {\n 'date': '2011-09-13',\n 'description': 'Criação do Território Federal do Amapá',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AP',\n 'type': 'F'\n },\n {\n 'date': '2011-09-16',\n 'description': 'Emancipação Política do Alagoas',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AL',\n 'type': 'F'\n },\n {\n 'date': '2011-09-20',\n 'description': 'Dia do Gaúcho',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'RS',\n 'type': 'F'\n },\n {\n 'date': '2011-10-03',\n 'description': 'Mártires de Cunhaú e Uruaçu',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'RN',\n 'type': 'F'\n },\n {\n 'date': '2011-10-05',\n 'description': 'Criação dos Estado de Roraima',\n 'locale': 'pt-BR',\n 'notes': 
'',\n 'region': 'RR',\n 'type': 'F'\n },\n {\n 'date': '2011-10-05',\n 'description': 'Criação dos Estado de Tocantins',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'TO',\n 'type': 'F'\n },\n {\n 'date': '2011-10-11',\n 'description': 'Criação do Estado do Mato Grosso do Sul',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'MS',\n 'type': 'F'\n },\n {\n 'date': '2011-10-12',\n 'description': 'Nossa Senhora Aparecida',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-10-19',\n 'description': 'Dia do Piauí',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'PI',\n 'type': 'F'\n },\n {\n 'date': '2011-10-24',\n 'description': 'Pedra Fundamental de Goiânia',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'GO',\n 'type': 'F'\n },\n {\n 'date': '2011-11-02',\n 'description': 'Finados',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-15',\n 'description': 'Proclamação da República',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-11-17',\n 'description': 'Assinatura do Tratado de Petrópolis',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AC',\n 'type': 'F'\n },\n {\n 'date': '2011-11-20',\n 'description': 'Morte de Zumbi dos Palmares',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AL',\n 'type': 'F'\n },\n {\n 'date': '2011-11-20',\n 'description': 'Dia da Consciência Negra',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AM',\n 'type': 'F'\n },\n {\n 'date': '2011-11-20',\n 'description': 'Dia da Consciência Negra',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'MT',\n 'type': 'F'\n },\n {\n 'date': '2011-11-20',\n 'description': 'Dia da Consciência Negra',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'RJ',\n 'type': 'F'\n },\n {\n 'date': '2011-11-25',\n 'description': 'Dia de Santa Catarina de Alexandria',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'SC',\n 'type': 'RF'\n },\n {\n 'date': '2011-11-30',\n 'description': 'Dia do Evangélico do Distrito Federal',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'DF',\n 'type': 'RF'\n },\n {\n 'date': '2011-12-08',\n 'description': 'Nossa Senhora da Conceição',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'AM',\n 'type': 'RF'\n },\n {\n 'date': '2011-12-19',\n 'description': 'Emancipação Política do Estado do Paraná',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': 'PR',\n 'type': 'F'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Natal',\n 'locale': 'pt-BR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.4516128897666931, "alphanum_fraction": 0.6612903475761414, "avg_line_length": 14.5, "blob_id": "e4e7cfb91a5d33a6633306ab6daba6038cc2bc28", "content_id": "13495a476c15b70356494f16c209345293ee27d8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 62, "license_type": "permissive", "max_line_length": 22, "num_lines": 4, "path": "/requirements.txt", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "arrow==1.2.2\ndocopt==0.6.2\npython-dateutil==2.8.2\nsix==1.16.0\n" }, { "alpha_fraction": 0.5986732840538025, "alphanum_fraction": 0.6583747863769531, "avg_line_length": 24.125, "blob_id": "5144c169c455957d6c9311cfddc95bb256d78c46", "content_id": "970a452b077385ffcb33fd6c3dc1068df5aaab36", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 604, "license_type": "permissive", "max_line_length": 51, "num_lines": 
24, "path": "/src/holidata/holidays/de-BE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass de_BE(Locale):\n \"\"\"\n 01-01: [NF] Neujahr\n 05-01: [NF] Tag der Arbeit\n 07-21: [NF] Nationalfeiertag\n 08-15: [NRF] Mariä Himmelfahrt\n 11-01: [NRF] Allerheiligen\n 11-11: [NF] Waffenstillstand\n 12-25: [NRF] Weihnacht\n Easter: [NRV] Ostern\n 1 day after Easter: [NRV] Ostermontag\n 39 days after Easter: [NRV] Christi Himmelfahrt\n 49 days after Easter: [NRV] Pfingsten\n 50 days after Easter: [NRV] Pfingstmontag\n \"\"\"\n\n locale = \"de-BE\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.5381739139556885, "alphanum_fraction": 0.5718292593955994, "avg_line_length": 34.263736724853516, "blob_id": "8e384b6dd89729c34a04f573c852fbc41aa07f68", "content_id": "a2e6fba2f33e993e2be831732cbe2e64c555b53d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3209, "license_type": "permissive", "max_line_length": 104, "num_lines": 91, "path": "/src/holidata/holidays/en-ZA.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Holiday, Locale\n\n\"\"\"\nPublic Holidays Act (Act No 36 of 1994).\nsources: \nhttps://www.gov.za/sites/default/files/gcis_document/201409/act36of1994.pdf\nhttps://www.gov.za/sites/default/files/gcis_document/201409/act48of1995.pdf\n\"\"\"\n\n\nclass en_ZA(Locale):\n \"\"\"\n 12-25: [NF] Christmas Day\n 2 days before Easter: [NRV] Good Friday\n 1 day after Easter: [NRV] Family Day\n \"\"\"\n\n locale = \"en-ZA\"\n easter_type = EASTER_WESTERN\n\n def holiday_new_years_day(self):\n \"\"\"01-01: [NF] New Year's Day\"\"\"\n return self.get_holidays(SmartDayArrow(self.year, 1, 1), \"New Year's Day\")\n\n def holiday_human_rights_day(self):\n \"\"\"03-21: [NF] Human Rights Day\"\"\"\n return self.get_holidays(SmartDayArrow(self.year, 3, 21), \"Human Rights Day\")\n\n def holiday_freedom_day(self):\n \"\"\"04-27: [NF] Freedom Day\"\"\"\n return self.get_holidays(SmartDayArrow(self.year, 4, 27), \"Freedom Day\")\n\n def holiday_workers_day(self):\n \"\"\"05-01: [NF] Worker's Day\"\"\"\n return self.get_holidays(SmartDayArrow(self.year, 5, 1), \"Worker's Day\")\n\n def holiday_youth_day(self):\n \"\"\"06-16: [NF] Youth Day\"\"\"\n return self.get_holidays(SmartDayArrow(self.year, 6, 16), \"Youth Day\")\n\n def holiday_national_womens_day(self):\n \"\"\"08-09: [NF] National Women's Day\"\"\"\n return self.get_holidays(SmartDayArrow(self.year, 8, 9), \"National Women's Day\")\n\n def holiday_heritage_day(self):\n \"\"\"09-24: [NF] Heritage Day\"\"\"\n return self.get_holidays(SmartDayArrow(self.year, 9, 24), \"Heritage Day\")\n\n def holiday_day_of_reconciliation(self):\n \"\"\"12-16: [NF] Day of Reconciliation\"\"\"\n return self.get_holidays(SmartDayArrow(self.year, 12, 16), \"Day of Reconciliation\")\n\n def holiday_day_of_goodwill(self):\n \"\"\"12-26: [NF] Day of Goodwill\"\"\"\n return self.get_holidays(SmartDayArrow(self.year, 12, 26), \"Day of Goodwill\")\n\n def get_holidays(self, original_date, description):\n \"\"\"\n Applies section 2.1 of the Public Holidays Act (Act No 36 of 1994):\n 'Whenever any public holiday falls on a Sunday, the following Monday shall be a public holiday.'\n \"\"\"\n if original_date.weekday() == 
\"sunday\":\n supplement_date = original_date.shift(days=1)\n return [\n Holiday(\n locale=self.locale,\n region='',\n date=original_date,\n description=description,\n flags=\"NF\",\n notes=\"\"),\n Holiday(\n locale=self.locale,\n region='',\n date=supplement_date,\n description=description + \" (Supplement)\",\n flags=\"NF\",\n notes=\"Supplement holiday\")\n ]\n else:\n return [Holiday(\n locale=self.locale,\n region='',\n date=original_date,\n description=description,\n flags=\"NF\",\n notes=\"\")]\n" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "dc4bebc63f3083224193d91c7df4e6dc11b31c3c", "content_id": "f1f8503b88b714897bef76c9e78173177a97e17e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/FR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass FR(Country):\n id = \"FR\"\n languages = [\"fr\"]\n default_lang = \"fr\"\n" }, { "alpha_fraction": 0.5766870975494385, "alphanum_fraction": 0.6441717743873596, "avg_line_length": 22.285715103149414, "blob_id": "f4db174de1a32868239c6ad59a6e7a6b3b709add", "content_id": "db3a737d09b7ca713434c079d1bc6dc0a2110daf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 625, "license_type": "permissive", "max_line_length": 43, "num_lines": 21, "path": "/src/holidata/holidays/ru-RU.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_ORTHODOX\n\nfrom .holidays import Locale\n\n\nclass ru_RU(Locale):\n \"\"\"\n 01-01: [NF] Новый Год\n 01-07: [NRF] Рождество Христово\n 02-23: [NF] День защитника Отечества\n 03-08: [NF] Международный женский день\n 05-01: [NF] Праздник весны и труда\n 05-09: [NF] День Победы\n 06-12: [NF] День России\n 11-04: [NF] День народного единства\n Easter: [NRV] Пасха\n \"\"\"\n\n locale = \"ru-RU\"\n easter_type = EASTER_ORTHODOX\n" }, { "alpha_fraction": 0.6465798020362854, "alphanum_fraction": 0.665037989616394, "avg_line_length": 46.21794891357422, "blob_id": "a0c8bf403b053e7970e83053dd9e12b940df7da5", "content_id": "4f1d02f6fb531d14d31922969185042bb9d7f1b7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3699, "license_type": "permissive", "max_line_length": 163, "num_lines": 78, "path": "/src/holidata/holidays/fr-CA.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Holiday, Locale\n\n#\n# Information taken from various government websites around 2020-04-16\n# https://www.canada.ca/fr/emploi-developpement-social/services/normes-travail/rapports/jours-feries.html\n# http://www4.gouv.qc.ca/FR/Portail/Citoyens/Evenements/travailleur-avec-salaire/Pages/jours-feries-chomes-payes.aspx\n# https://www.ontario.ca/document/your-guide-employment-standards-act-0/public-holidays#section-2\n# https://www.cfib-fcei.ca/en/tools-resources/paying-employees-public-holidays-newfoundland-labrador\n# https://www.princeedwardisland.ca/en/information/economic-growth-tourism-and-culture/paid-holidays\n# 
https://www2.gnb.ca/content/dam/gnb/Departments/petl-epft/PDF/es/FactSheets/PublicHolidaysVacation.pdf\n# https://www.gov.mb.ca/labour/standards/doc,gen-holidays-after-april-30-07,factsheet.html#q12\n# https://www.saskatchewan.ca/business/employment-standards/vacations-holidays-leaves-and-absences/public-statutory-holidays/list-of-saskatchewan-public-holidays\n#\n# Also those sites for some information\n# https://www.officeholidays.com/holidays/canada/canada-remembrance-day\n# https://www.timeanddate.com/holidays/canada/family-day\n# https://fr.wikipedia.org/wiki/F%C3%AAtes_et_jours_f%C3%A9ri%C3%A9s_au_Canada\n#\n# I have not checked every province and territory website, but the wikipedia\n# summary has been true for everything that I have checked, although it seems\n# to be considering more holidays than the bare minimum and also counts holidays\n# that are not mandated but usually observed (e.g. St-Patrick's Day in NL which\n# is not statutory, but is given to government employees).\n#\n\n\nclass fr_CA(Locale):\n \"\"\"\n 01-01: [NF] Jour de l'An\n 06-24: [QC] [F] Fête Nationale\n 07-01: [NF] Fête du Canada\n 11-11: [AB,BC,NB,NL,NT] [F] Jour du Souvenir\n 12-25: [NRF] Jour de Noël\n 12-26: [NRF] Lendemain de Noël\n 2 days before Easter: [NRV] Vendredi Saint\n 1 day after Easter: [AB,PE,QC] [RV] Lundi de Pâques\n 3. monday in February: [AB,ON,SK,NB] [V] Fête de la Famille\n 3. monday in February: [MB] [V] Journée Louis Riel\n 3. monday in February: [PE] [V] Fête des Insulaires\n 1. monday in August: [NT,NU] [V] Premier lundi d'août\n 1. monday in August: [AB] [V] Fête du Patrimoine\n 1. monday in August: [SK] [V] Fête de la Saskatchewan\n 1. monday in August: [NS] [V] Jour de la Fondation\n 1. monday in August: [NB] [V] Jour du Nouveau-Brunswick\n 1. monday in September: [NV] Fête du Travail\n 2. 
monday in October: [AB,BC,MB,NL,ON,QC,SK,NT,NU,YT] [V] Jour de l'Action de grâce\n \"\"\"\n\n locale = \"fr-CA\"\n easter_type = EASTER_WESTERN\n\n def holiday_journee_nationale_des_patriotes(self):\n return [Holiday(\n locale=self.locale,\n region=\"QC\",\n date=SmartDayArrow(self.year, 5, 25).shift_to_weekday(\n \"monday\", order=1, reverse=True, including=False),\n description=\"Journée Nationale des Patriotes\",\n flags=\"V\",\n notes=\"\")]\n\n\n def holiday_fete_de_la_reine_victoria(self):\n return [Holiday(\n locale=self.locale,\n region=region,\n date=SmartDayArrow(self.year, 5, 25).shift_to_weekday(\n \"monday\", order=1, reverse=True, including=False),\n description=\"Fête de la Reine Victoria\",\n flags=\"V\",\n notes=\"\"\n ) for region in\n [\"AB\", \"BC\", \"MB\", \"NS\", \"ON\", \"SK\", \"NT\", \"NU\", \"YT\"]\n ]\n\n" }, { "alpha_fraction": 0.31389227509498596, "alphanum_fraction": 0.35925474762916565, "avg_line_length": 20.66666603088379, "blob_id": "5f14c99df9985dac7981b4c2b9fcfdab73947d9a", "content_id": "26d439ce361417d87d44127e783caa634e4195f1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2469, "license_type": "permissive", "max_line_length": 49, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_US-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-17',\n 'description': 'Birthday of Martin Luther King, Jr.',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-02-21',\n 'description': \"Washington's Birthday\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-04-18',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2022-04-18',\n 'description': \"Patriots' Day\",\n 'locale': 'en-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2022-05-30',\n 'description': 'Memorial Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-07-04',\n 'description': 'Independence Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-09-05',\n 'description': 'Labor Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-10-10',\n 'description': 'Columbus Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-11-11',\n 'description': 'Veterans Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-11-24',\n 'description': 'Thanksgiving Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-11-25',\n 'description': 'Day after Thanksgiving',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2022-12-24',\n 'description': 'Christmas Eve',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3200301229953766, "alphanum_fraction": 0.3652108311653137, "avg_line_length": 20.778688430786133, "blob_id": 
"78945f7df346fa567b603c34e8375e4d5b053082", "content_id": "1b7f69f7dc29dd580549481dc219e4173fd1238b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2663, "license_type": "permissive", "max_line_length": 56, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-02',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-02-08',\n 'description': 'Prešernov dan',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31142035126686096, "alphanum_fraction": 0.3574855923652649, "avg_line_length": 20.275510787963867, "blob_id": "fe49e7a7b12c3ba8bda79df26af2e1e16db0d1f9", "content_id": "96ba747b1de885ce6648fbcd084b74805dd56d25", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2088, "license_type": "permissive", "max_line_length": 47, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[da_DK-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Nytårsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-06',\n 'description': 'Skærtorsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Langfredag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 
'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Anden påskedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-05',\n 'description': 'Store bededag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Kristi himmelfartsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Anden pinsedag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-06-05',\n 'description': 'Grundlovsdag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Anden juledag',\n 'locale': 'da-DK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "48aa2adc7b7a384d17875a1fee6d5b8fae7100bd", "content_id": "8f868683fd5991f66f736941061e7c2ddfba3e4d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-04-17',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-24',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-04',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-12-24',\n 'description': 
'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30328208208084106, "alphanum_fraction": 0.35189032554626465, "avg_line_length": 20.305309295654297, "blob_id": "f0945ce7298c51d46cc36480f1bd44807a130dbb", "content_id": "a0ac6cf3ca7674d0a119ac7caa705acc8b5f6d2f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4817, "license_type": "permissive", "max_line_length": 49, "num_lines": 226, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'RF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '6',\n 'type': 'RF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '7',\n 'type': 'RF'\n },\n {\n 'date': '2012-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '8',\n 'type': 'RF'\n },\n {\n 'date': '2012-04-06',\n 'description': 'Karfreitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Ostersonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Ostermontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Staatsfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-04',\n 'description': 'Florian',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '4',\n 'type': 'F'\n },\n {\n 'date': '2012-05-17',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-28',\n 'description': 'Pfingstmontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-06-07',\n 'description': 'Fronleichnam',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-09-24',\n 'description': 'Rupert',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '5',\n 'type': 'F'\n },\n {\n 'date': '2012-10-10',\n 'description': 'Tag der Volksabstimmung',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'F'\n 
},\n {\n 'date': '2012-10-26',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-11',\n 'description': 'Martin',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '1',\n 'type': 'F'\n },\n {\n 'date': '2012-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '9',\n 'type': 'F'\n },\n {\n 'date': '2012-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '3',\n 'type': 'F'\n },\n {\n 'date': '2012-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-24',\n 'description': 'Heiliger Abend',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Christtag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'Stefanitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-31',\n 'description': 'Silvester',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.329084575176239, "alphanum_fraction": 0.37504827976226807, "avg_line_length": 21.719297409057617, "blob_id": "70412d191f296a3b65f7a55c3cf5297554963198", "content_id": "7c692a1a0f0aca3aeec5dd2ff3ca679172034cd1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2620, "license_type": "permissive", "max_line_length": 65, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-24',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-25',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-26',\n 'description': 'Ramazan Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-07-15',\n 'description': 'Demokrasi ve Milli Birlik Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-07-31',\n 'description': 'Kurban Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-08-01',\n 'description': 'Kurban Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-08-02',\n 'description': 'Kurban Bayramı (3. 
Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-08-03',\n 'description': 'Kurban Bayramı (4. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3186638355255127, "alphanum_fraction": 0.3643445670604706, "avg_line_length": 20.852941513061523, "blob_id": "9c85eca574b6e016b5c52304ccab1111d367c348", "content_id": "3299a6c70878af5c31ed2ec356ab3358aca3a9cc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 24547, "license_type": "permissive", "max_line_length": 45, "num_lines": 1122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_CH-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Neujahrstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'F'\n },\n {\n 'date': '2019-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'F'\n },\n {\n 'date': '2019-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'F'\n },\n {\n 'date': '2019-01-02',\n 'description': 'Berchtoldstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'F'\n },\n {\n 'date': '2019-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2019-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2019-01-06',\n 'description': 'Heilige Drei Könige',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2019-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2019-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2019-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2019-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2019-03-19',\n 'description': 'Josefstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AG',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n 
{\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'FR',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NE',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SO',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Karfreitag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Ostersonntag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': 
'',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Ostermontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'F'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'F'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'F'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NE',\n 'type': 'F'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'F'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'F'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'F'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'F'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Auffahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GE',\n 'type': 'RV'\n },\n 
{\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VD',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pfingstmontag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RV'\n },\n {\n 'date': '2019-06-20',\n 'description': 'Fronleichnam',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RV'\n },\n {\n 'date': '2019-08-01',\n 'description': 'Bundesfeier',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': 
'2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2019-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'JU',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 'type': 'RF'\n },\n {\n 'date': '2019-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'OW',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'VS',\n 
'type': 'RF'\n },\n {\n 'date': '2019-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZG',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Weihnachtstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AI',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BE',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BL',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'BS',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GL',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'GR',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'LU',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SG',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'SZ',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TG',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'TI',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'UR',\n 'type': 'RF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Stephanstag',\n 'locale': 'de-CH',\n 'notes': '',\n 'region': 'ZH',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.31833910942077637, "alphanum_fraction": 0.36332181096076965, "avg_line_length": 20.820755004882812, "blob_id": "2beaff829018f8159e9a60dc3fd56df1fab6ee63", "content_id": "3d1cd1a3e602feb11e98a5cec105f1b97392b67b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2312, "license_type": "permissive", "max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 
'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-10',\n 'description': \"National Women's Day (Supplement)\",\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.6139410138130188, "alphanum_fraction": 0.6528149843215942, "avg_line_length": 28.84000015258789, "blob_id": "f37d012cf9efa77c1fc72391ee28837120411759", "content_id": "825709f68a12495e9450a5204be71b81ce2d1868", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 746, "license_type": "permissive", "max_line_length": 66, "num_lines": 25, "path": "/src/holidata/holidays/en-US.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass en_US(Locale):\n \"\"\"\n 01-01: [NF] New Year's Day\n 07-04: [NF] Independence Day\n 11-11: [NF] Veterans Day\n 12-24: [NRF] Christmas Eve\n 12-25: [NRF] Christmas Day\n 3. monday in January: [NV] Birthday of Martin Luther King, Jr.\n 3. monday in February: [NV] Washington's Birthday\n 3. monday in April: [MA,ME] [V] Patriots' Day\n 1. last monday in May: [NV] Memorial Day\n 1. monday in September: [NV] Labor Day\n 2. monday in October: [NV] Columbus Day\n 4. thursday in November: [NV] Thanksgiving Day\n 4. 
friday in November: [NV] Day after Thanksgiving\n \"\"\"\n\n locale = \"en-US\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.5779660940170288, "alphanum_fraction": 0.6389830708503723, "avg_line_length": 23.58333396911621, "blob_id": "77a2805fe430863d82f67fdfa6d658312639cac7", "content_id": "de746c09695c75d72b1e31f791a35a2fdddab78f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 597, "license_type": "permissive", "max_line_length": 50, "num_lines": 24, "path": "/src/holidata/holidays/fr-BE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass fr_BE(Locale):\n \"\"\"\n 01-01: [NF] Nouvel An\n 05-01: [NF] Fête du Travail\n 07-21: [NF] Fête nationale\n 08-15: [NRF] Assomption\n 11-01: [NRF] Toussaint\n 11-11: [NF] Jour de l'armistice\n 12-25: [NRF] Noël\n Easter: [NRV] Pâques\n 1 day after Easter: [NRV] Lundi de Pâques\n 39 days after Easter: [NRV] Ascension\n 49 days after Easter: [NRV] Pentecôte\n 50 days after Easter: [NRV] Lundi de Pentecôte\n \"\"\"\n\n locale = \"fr-BE\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.3191857933998108, "alphanum_fraction": 0.36422693729400635, "avg_line_length": 20.79245376586914, "blob_id": "7ad7b696903c81e86fd511c166c3c20bbd7b2e69", "content_id": "61b3be906a6a4cf3849773b9e15c9bbca1faabb2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2315, "license_type": "permissive", "max_line_length": 89, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Nova Godina',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-06',\n 'description': 'Sveta tri kralja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Uskrs',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Uskršnji ponedjeljak',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Praznik rada',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-30',\n 'description': 'Dan državnosti',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-22',\n 'description': 'Dan antifašističke borbe',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-23',\n 'description': 'Tijelovo',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-08-05',\n 'description': 'Dan pobjede i domovinske zahvalnosti i Dan hrvatskih branitelja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Velika Gospa',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-01',\n 'description': 'Dan svih svetih',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Božić',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 
'description': 'Sveti Stjepan',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3322019577026367, "alphanum_fraction": 0.3763258457183838, "avg_line_length": 21.245283126831055, "blob_id": "6a98353b52e5c846c96e7ba9547cdcd87fc0142f", "content_id": "228541edb0887d084ed34ad3a9dead70b958786e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2378, "license_type": "permissive", "max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Nowy Rok',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-06',\n 'description': 'Trzech Króli',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Wielkanoc',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Poniedziałek Wielkanocny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Święto Pracy',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-03',\n 'description': 'Święto Konstytucji Trzeciego Maja',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Zielone Świątki',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-23',\n 'description': 'Boże Ciało',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Wniebowzięcie Najświętszej Maryi Panny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-01',\n 'description': 'Wszystkich Świętych',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-11-11',\n 'description': 'Narodowe Święto Niepodległości',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Boże Narodzenie (pierwszy dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Boże Narodzenie (drugi dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "1ecfccf8a1b67e364a7648829a83d3bfded37e10", "content_id": "e44bd9fc723b2b98d644eb90daed33a1d4b087ee", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-02-23',\n 'description': 'День защитника 
Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-05',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "922736aa5222fa7c414d3702264640e994eb83fc", "content_id": "98bafb27e8be8e03ab5d766d3e85e8b3242c30b8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/GB.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass GB(Country):\n id = \"GB\"\n languages = [\"en\"]\n default_lang = \"en\"\n" }, { "alpha_fraction": 0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "1a136e2e86c3eebf0c1637ad09b77fa146135dc5", "content_id": "20208ff958418841b70c3703ecce4f93c78d3b10", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-03-08',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Sexta-feira Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-23',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 
'date': '2011-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Natal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "251c09df78ed7302aa93e31342a25c43a8f3dbf1", "content_id": "defe4ed6073126919615406ca86af1b248aeca7b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': '2. pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-20',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-10-31',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.320070743560791, "alphanum_fraction": 0.3648688495159149, "avg_line_length": 21.03896141052246, "blob_id": "1c09bad584ffc4c15512dda79cab7b080274961a", "content_id": "caa5f17dc7a88c551c4a854333c0164fa860fa06", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "permissive", "max_line_length": 53, "num_lines": 154, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_CO-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 
'date': '2013-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-07',\n 'description': 'Día de los Reyes Magos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-25',\n 'description': 'Día de San José',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Jueves Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Viernes Santo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Domingo de Pascua',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Día del Trabajo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-13',\n 'description': 'La Ascensión del Señor',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-03',\n 'description': 'Corpus Christi',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-10',\n 'description': 'El Sagrado Corazón de Jesús',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-07-01',\n 'description': 'San Pedro y San Pablo',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-07-20',\n 'description': 'Grito de Independencia',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-07',\n 'description': 'Batalla de Boyacá',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-19',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-10-14',\n 'description': 'Día de la Raza',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-11-04',\n 'description': 'Todos los Santos',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-11-11',\n 'description': 'Independencia de Cartagena',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2013-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Navidad',\n 'locale': 'es-CO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "6f6b121d4b310599833396bb6bd60076f2596ec4", "content_id": "482d34259f2cc8818cb2d01511d97d60b28764c2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n 
{\n 'date': '2012-04-06',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-27',\n 'description': 'Nelipühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "79efecbe0e3362a010b8d8f33015d85090e42eef", "content_id": "33aae39b40aafca4467724b8ee2dc75512bc3b18", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-24',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': '2. 
pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-06-02',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-12',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-06-25',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-11-05',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.47429218888282776, "alphanum_fraction": 0.4856172204017639, "avg_line_length": 30.762590408325195, "blob_id": "b144478e83fc46822995a152ec0414e6bd6a85d4", "content_id": "df61c1f1e481df5f17315c62faba3b4b4f291847", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4415, "license_type": "permissive", "max_line_length": 73, "num_lines": 139, "path": "/src/holidata/holidays/en-NZ.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Holiday, Locale\n\n\nclass en_NZ(Locale):\n \"\"\"\n 01-01: [NF] New Year's Day\n 02-06: [NF] Waitangi Day\n 04-25: [NF] ANZAC Day\n 12-25: [NRF] Christmas Day\n 12-26: [NF] Boxing Day\n 2 days before Easter: [NRV] Good Friday\n 1 day after Easter: [NRV] Easter Monday\n 1. monday in june: [NV] Queen's Birthday\n 4. 
monday in october: [NV] Labour Day\n \"\"\"\n\n locale = \"en-NZ\"\n easter_type = EASTER_WESTERN\n\n def holiday_new_years_day_observed(self):\n date = SmartDayArrow(self.year, 1, 1)\n\n if date.weekday() in [\"saturday\", \"sunday\"]:\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"monday\", including=True),\n description=\"New Year's Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n\n def holiday_after_new_years_day(self):\n date = SmartDayArrow(self.year, 1, 2)\n\n if date.weekday() in [\"sunday\", \"monday\"]:\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"tuesday\", including=True),\n description=\"Day after New Year's Day\",\n flags=\"NV\",\n notes=\"\")]\n\n elif date.weekday() == \"saturday\":\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"monday\", including=True),\n description=\"Day after New Year's Day\",\n flags=\"NV\",\n notes=\"\")]\n\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date,\n description=\"Day after New Year's Day\",\n flags=\"NV\",\n notes=\"\")]\n\n def holiday_waitangi_day_observed(self):\n date = SmartDayArrow(self.year, 2, 6)\n\n if self.year > 2016 and date.weekday() in [\"saturday\", \"sunday\"]:\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"monday\", including=True),\n description=\"Waitangi Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n\n def holiday_anzac_day_observed(self):\n date = SmartDayArrow(self.year, 4, 25)\n\n if self.year > 2015 and date.weekday() in [\"saturday\", \"sunday\"]:\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"monday\", including=True),\n description=\"ANZAC Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n\n def holiday_christmas_day_observed(self):\n date = SmartDayArrow(self.year, 12, 25)\n\n if date.weekday() == \"sunday\":\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"tuesday\", including=True),\n description=\"Christmas Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n elif date.weekday() == \"saturday\":\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"monday\", including=True),\n description=\"Christmas Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n\n def holiday_boxing_day_observed(self):\n date = SmartDayArrow(self.year, 12, 26)\n\n if date.weekday() == \"sunday\":\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"tuesday\", including=True),\n description=\"Boxing Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n elif date.weekday() == \"saturday\":\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date.shift_to_weekday(\"monday\", including=True),\n description=\"Boxing Day (observed)\",\n flags=\"NV\",\n notes=\"\")]\n\n return []\n" }, { "alpha_fraction": 0.3218020796775818, "alphanum_fraction": 0.36685439944267273, "avg_line_length": 20.8157901763916, "blob_id": "e9cd5f38815aa2895d5459856952603a77c40c9b", "content_id": "ba4af9ee95327e270d166fffcbb03140d3172ffc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2493, "license_type": "permissive", "max_line_length": 56, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2013] 1.py", "repo_name": 
"GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-02-08',\n 'description': 'Prešernov dan',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-20',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31302428245544434, "alphanum_fraction": 0.35894039273262024, "avg_line_length": 20.37735939025879, "blob_id": "a6796eabe5f213e90bbeafb71f35f96d2c05623b", "content_id": "bff333452208b45015241cdabc771cfcfcf003b5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2273, "license_type": "permissive", "max_line_length": 47, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-06',\n 'description': 'Trettondedagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Långfredagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Påskdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Annandag påsk',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-09',\n 'description': 'Kristi himmelfärdsdag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 
'NRV'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Pingst',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-22',\n 'description': 'Midsommardagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-11-02',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-12-06',\n 'description': 'Självständighetsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30328208208084106, "alphanum_fraction": 0.35189032554626465, "avg_line_length": 20.305309295654297, "blob_id": "dfdb7d2fb39b753232b2b6c101416e18cfbfef4e", "content_id": "6c74fb01d6405d2015e3815256bae4f505aed027", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4817, "license_type": "permissive", "max_line_length": 49, "num_lines": 226, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '6',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '7',\n 'type': 'RF'\n },\n {\n 'date': '2014-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '8',\n 'type': 'RF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Karfreitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Ostersonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Ostermontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Staatsfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-04',\n 'description': 'Florian',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '4',\n 'type': 'F'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pfingstmontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-19',\n 'description': 'Fronleichnam',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 
'type': 'NRF'\n },\n {\n 'date': '2014-09-24',\n 'description': 'Rupert',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '5',\n 'type': 'F'\n },\n {\n 'date': '2014-10-10',\n 'description': 'Tag der Volksabstimmung',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'F'\n },\n {\n 'date': '2014-10-26',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-11',\n 'description': 'Martin',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '1',\n 'type': 'F'\n },\n {\n 'date': '2014-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '9',\n 'type': 'F'\n },\n {\n 'date': '2014-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '3',\n 'type': 'F'\n },\n {\n 'date': '2014-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-24',\n 'description': 'Heiliger Abend',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Christtag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Stefanitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-31',\n 'description': 'Silvester',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3191857933998108, "alphanum_fraction": 0.36422693729400635, "avg_line_length": 20.79245376586914, "blob_id": "c94b49236bb8bab332d6294bd887a72d04193cc8", "content_id": "da4dd2b11b7a6701b654126f5326fac63ca7d492", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2315, "license_type": "permissive", "max_line_length": 89, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Nova Godina',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-06',\n 'description': 'Sveta tri kralja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-04-08',\n 'description': 'Uskrs',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Uskršnji ponedjeljak',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Praznik rada',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-30',\n 'description': 'Dan državnosti',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-06-07',\n 'description': 'Tijelovo',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-06-22',\n 'description': 'Dan antifašističke borbe',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-05',\n 'description': 'Dan pobjede i domovinske zahvalnosti i Dan hrvatskih branitelja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-08-15',\n 'description': 
'Velika Gospa',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-11-01',\n 'description': 'Dan svih svetih',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Božić',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'Sveti Stjepan',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3191489279270172, "alphanum_fraction": 0.3645389974117279, "avg_line_length": 20.59183692932129, "blob_id": "6eae3de6d44b15e284bc5289134c3d843dd68ecd", "content_id": "6c87b7146bb0901169ec9a8ccd73355e2fcbb530", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2115, "license_type": "permissive", "max_line_length": 53, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nieuwjaar',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Pasen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Paasmaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Dag van de arbeid',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-26',\n 'description': 'Onze Lieve Heer hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Pinksteren',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Pinkstermaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-21',\n 'description': 'Nationale feestdag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Onze Lieve Vrouw hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-11',\n 'description': 'Wapenstilstand',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Kerstmis',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32876327633857727, "alphanum_fraction": 0.37381085753440857, "avg_line_length": 21.20186424255371, "blob_id": "f4a76b9df93dbcf2f4b238f2b7634371f46e8933", "content_id": "008a2646d077dbbe89e2051c0b6222332a2f582c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7152, "license_type": "permissive", "max_line_length": 130, "num_lines": 322, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-06',\n 'description': 
'Heilige drei Könige',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BW',\n 'type': 'RF'\n },\n {\n 'date': '2020-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BY',\n 'type': 'RF'\n },\n {\n 'date': '2020-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'ST',\n 'type': 'RF'\n },\n {\n 'date': '2020-03-08',\n 'description': 'Internationaler Frauentag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BE',\n 'type': 'F'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Karfreitag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Ostern',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Ostermontag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Erster Maifeiertag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-08',\n 'description': '75. Jahrestag der Befreiung vom Nationalsozialismus und der Beendigung des Zweiten Weltkrieges in Europa',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BE',\n 'type': 'F'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Pfingstsonntag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Pfingstmontag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-11',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BW',\n 'type': 'RV'\n },\n {\n 'date': '2020-06-11',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BY',\n 'type': 'RV'\n },\n {\n 'date': '2020-06-11',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'HE',\n 'type': 'RV'\n },\n {\n 'date': '2020-06-11',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RV'\n },\n {\n 'date': '2020-06-11',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'RP',\n 'type': 'RV'\n },\n {\n 'date': '2020-06-11',\n 'description': 'Fronleichnam',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SL',\n 'type': 'RV'\n },\n {\n 'date': '2020-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SL',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-03',\n 'description': 'Tag der Deutschen Einheit',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BB',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BH',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'HH',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'MV',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'NI',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Reformationstag',\n 
'locale': 'de-DE',\n 'notes': '',\n 'region': 'SH',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SN',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'ST',\n 'type': 'RF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Reformationstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'TH',\n 'type': 'RF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BW',\n 'type': 'RF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'BY',\n 'type': 'RF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'NW',\n 'type': 'RF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'RP',\n 'type': 'RF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SL',\n 'type': 'RF'\n },\n {\n 'date': '2020-11-18',\n 'description': 'Buß- und Bettag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': 'SN',\n 'type': 'RV'\n },\n {\n 'date': '2020-12-24',\n 'description': 'Heilig Abend',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Weihnachtstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Zweiter Weihnachtstag',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-31',\n 'description': 'Silvester',\n 'locale': 'de-DE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.37711507081985474, "alphanum_fraction": 0.4381345212459564, "avg_line_length": 35.22988510131836, "blob_id": "82f960b1ed41a7226c2f8d95cd0b8060107717e4", "content_id": "ee2525aba001f30f99d10dd7cd636f7502033741", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9559, "license_type": "permissive", "max_line_length": 125, "num_lines": 261, "path": "/src/holidata/holidays/hu-HU.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import easter, SmartDayArrow\nfrom .holidays import Locale, Holiday\n\n\"\"\"\nsources:\n- Hungarian Constitution - Article J (1)\n https://www.keh.hu/magyarorszag_alaptorvenye/1515-Magyarorszag_Alaptorvenye\n- 2017. évi XIII. 
törvény egyes törvényeknek a nagypéntek munkaszüneti nappá történő nyilvánításával összefüggő módosításáról\n https://mkogy.jogtar.hu/jogszabaly?docid=A1700013.TV\n\"\"\"\n\n\nclass hu_HU(Locale):\n \"\"\"\n 01-01: [NF] Újév\n 03-15: [NF] Az 1848-as forradalom ünnepe\n 05-01: [NF] A munka ünnepe\n 08-20: [NF] Az államalapítás ünnepe\n 10-23: [NF] Az 1956-os forradalom ünnepe\n 11-01: [NRF] Mindenszentek\n 12-25: [NRF] Karácsony\n 12-26: [NRF] Karácsony\n Easter: [NRV] Húsvét\n 1 day after Easter: [NRV] Húsvéthétfő\n 49 days after Easter: [NRV] Pünkösd\n 50 days after Easter: [NRV] Pünkösdhétfő\n \"\"\"\n\n locale = \"hu-HU\"\n easter_type = EASTER_WESTERN\n\n def holiday_nagypentek(self):\n \"\"\"\n 2 days before Easter: [NRV] Nagypéntek (since 2017)\n \"\"\"\n if self.year >= 2017:\n return [Holiday(\n self.locale,\n \"\",\n easter(self.year, self.easter_type).shift(days=-2),\n \"Nagypéntek\",\n \"NRV\"\n )]\n else:\n return []\n\n def holiday_munkaszuneti_nap(self):\n \"\"\"\n Non-Working days (Munkaszüneti nap)\n When a public holiday falls on a Tuesday or a Thursday, a special decree swaps the preceding Monday or the\n following Friday (respectively) with a not too distant Saturday\n 2015: 28/2014. (IX. 24.) NGM rendelet a 2015. évi munkaszüneti napok körüli munkarendről\n 2016: 8/2015. (VI. 29.) NGM rendelet a 2016. évi munkaszüneti napok körüli munkarendről\n 2018: 9/2017. (V. 19.) NGM rendelet a 2018. évi munkaszüneti napok körüli munkarendről\n 2019: 6/2018. (VIII. 23.) PM rendelet a 2019. évi munkaszüneti napok körüli munkarendről\n 2020: 7/2019. (VI. 25.) PM rendelet a 2020. évi munkaszüneti napok körüli munkarendről\n 2021: 14/2020. (V. 13.) ITM rendelet a 2021. évi munkaszüneti napok körüli munkarendről\n 2022: 23/2021. (VI. 1.) ITM rendelet a 2022. 
évi munkaszüneti napok körüli munkarendről\n \"\"\"\n if self.year == 2015:\n \"\"\"\n 01-02, swapped with 01-10\n 08-21, swapped with 08-08\n 12-24, swapped with 12-12\n \"\"\"\n return [\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 1, 2),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2015-01-10 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 8, 21),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2015-08-08 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 12, 24),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2015-12-12 munkanap\"\n )]\n if self.year == 2016:\n \"\"\"\n 03-14, swapped with 03-05\n 10-31, swapped with 10-15\n \"\"\"\n return [\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 3, 14),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2016-03-05 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 10, 31),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2016-10-15 munkanap\"\n )]\n if self.year == 2018:\n \"\"\"\n 03-16, swapped with 03-10\n 04-30, swapped with 04-21\n 10-22, swapped with 10-13\n 11-02, swapped with 11-10\n 12-24, swapped with 12-01\n 12-31, swapped with 12-15\n \"\"\"\n return [\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 3, 16),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2018-03-10 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 4, 30),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2018-04-21 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 10, 22),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2018-10-13 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 11, 2),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2018-11-10 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 12, 24),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2018-12-01 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 12, 31),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2018-12-15 munkanap\"\n )]\n if self.year == 2019:\n \"\"\"\n 08-19, swapped with 08-10\n 12-24, swapped with 12-07\n 12-27, swapped with 12-14\n \"\"\"\n return [\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 8, 19),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2019-08-10 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 12, 24),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2019-12-07 munkanap\"\n ),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 12, 27),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2019-12-14 munkanap\"\n )]\n if self.year == 2020:\n \"\"\"\n 08-21, swapped with 08-29\n 12-24, swapped with 12-12\n \"\"\"\n return [\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 8, 21),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2020-08-29 munkanap\"),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 12, 24),\n description=\"Munkaszüneti nap\",\n 
flags=\"NF\",\n notes=\"2020-12-12 munkanap\"\n )]\n if self.year == 2021:\n \"\"\"\n 12-24, swapped with 12-11\n \"\"\"\n return [\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 12, 24),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2021-12-11 munkanap\"\n )]\n if self.year == 2022:\n \"\"\"\n 03-14, swapped with 03-26\n 10-31, swapped with 10-15\n \"\"\"\n return [\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 3, 14),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2022-03-26 munkanap\"),\n Holiday(\n locale=self.locale,\n region=\"\",\n date=SmartDayArrow(self.year, 10, 31),\n description=\"Munkaszüneti nap\",\n flags=\"NF\",\n notes=\"2022-10-15 munkanap\"\n )]\n\n return []\n" }, { "alpha_fraction": 0.31093543767929077, "alphanum_fraction": 0.3601229786872864, "avg_line_length": 20.49056625366211, "blob_id": "12b6faea10effb6a41599134083395d5d4f3d1b1", "content_id": "389c99d22a3748d01feb3f06e460652d0fa176df", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2301, "license_type": "permissive", "max_line_length": 54, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Nagypéntek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3322019577026367, "alphanum_fraction": 0.3763258457183838, "avg_line_length": 21.245283126831055, "blob_id": "d1181755129787bb44036d05657daf962e67a407", "content_id": "1ca60f75706fed4c9d2ab72502b8d28a9f51f795", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2378, "license_type": "permissive", 
"max_line_length": 59, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pl_PL-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Nowy Rok',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-06',\n 'description': 'Trzech Króli',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Wielkanoc',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Poniedziałek Wielkanocny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Święto Pracy',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-03',\n 'description': 'Święto Konstytucji Trzeciego Maja',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-20',\n 'description': 'Zielone Świątki',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-31',\n 'description': 'Boże Ciało',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-08-15',\n 'description': 'Wniebowzięcie Najświętszej Maryi Panny',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-01',\n 'description': 'Wszystkich Świętych',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-11',\n 'description': 'Narodowe Święto Niepodległości',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Boże Narodzenie (pierwszy dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Boże Narodzenie (drugi dzień)',\n 'locale': 'pl-PL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3191857933998108, "alphanum_fraction": 0.36422693729400635, "avg_line_length": 20.79245376586914, "blob_id": "14c053ed0d14a1762f078cf454217015512c77bb", "content_id": "3c0a91565c2bc12a7fb8bda5a3488630782e3f7d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2315, "license_type": "permissive", "max_line_length": 89, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Nova Godina',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Sveta tri kralja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Uskrs',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Uskršnji ponedjeljak',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Praznik rada',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-30',\n 'description': 'Dan državnosti',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': 
'2014-06-19',\n 'description': 'Tijelovo',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-22',\n 'description': 'Dan antifašističke borbe',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-05',\n 'description': 'Dan pobjede i domovinske zahvalnosti i Dan hrvatskih branitelja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-15',\n 'description': 'Velika Gospa',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Dan svih svetih',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Božić',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Sveti Stjepan',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3136601746082306, "alphanum_fraction": 0.3593526780605316, "avg_line_length": 20.4489803314209, "blob_id": "2edcfa24c4a73968168e160c6951e004e9892348", "content_id": "006aed267824b2f85a6edf7757d65a89f61d439e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2101, "license_type": "permissive", "max_line_length": 49, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[it_IT-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Capodanno',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-01-06',\n 'description': 'Epifania',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Pasqua',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Pasquetta',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-04-25',\n 'description': 'Festa della liberazione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Festa del lavoro',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-06-02',\n 'description': 'Festa della repubblica',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-15',\n 'description': 'Assunzione (ferragosto)',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-11-01',\n 'description': 'Ognissanti',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-08',\n 'description': 'Immacolata concezione',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Natale',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'S.to Stefano',\n 'locale': 'it-IT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3191489279270172, "alphanum_fraction": 0.3645389974117279, "avg_line_length": 20.59183692932129, "blob_id": "3c268a954e9d37da1b43cc9a065849aeb0632659", "content_id": "c382d4797d59798eb3fc2fbcfc8eaa47db73b7fe", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 2115, "license_type": "permissive", "max_line_length": 53, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Nieuwjaar',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Pasen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Paasmaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Dag van de arbeid',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Onze Lieve Heer hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pinksteren',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Pinkstermaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-07-21',\n 'description': 'Nationale feestdag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-08-15',\n 'description': 'Onze Lieve Vrouw hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Wapenstilstand',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Kerstmis',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32693910598754883, "alphanum_fraction": 0.37322768568992615, "avg_line_length": 21.632076263427734, "blob_id": "9743248caa5b47588b7c44ca2874d39209c898a7", "content_id": "bd677dd130045feb2cdff5bf12a8531ac8b579bf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2427, "license_type": "permissive", "max_line_length": 65, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-07-05',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-07-06',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-07-07',\n 'description': 'Ramazan Bayramı (3. 
Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-09-12',\n 'description': 'Kurban Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-09-13',\n 'description': 'Kurban Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-09-14',\n 'description': 'Kurban Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-09-15',\n 'description': 'Kurban Bayramı (4. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.7216981053352356, "alphanum_fraction": 0.7337049841880798, "avg_line_length": 34.33333206176758, "blob_id": "aeffd8790b80f3ada201dc735b4e7fcb9132f875", "content_id": "15914d95c6ce0b0d659fa67baaef8be42cf58db6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2332, "license_type": "permissive", "max_line_length": 151, "num_lines": 66, "path": "/README.md", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# Holidata\n\n`holidata` is a utility for algorithmically producing holiday data.\nIts purpose is mainly for [holidata.net](https://holidata.net).\n\nHoliday data can be produced for a given year in a supported locale and output format.\n\n## Usage\n\n```\nholidata --year=<value> --locale=<value> [--output=<value>]\nholidata --year=<value> --country=<value> [--lang=<value>] [--output=<value>]\n```\nCall `holidata` with the `--help` option to more detailed information.\n\n### Examples\n* Create holiday data of year `2022` for locale `de-DE`:\n ```\n $ holidata --year=2022 --locale=de-DE\n ```\n \n* Create holiday data of year `2022` for country `BE` and language `fr`:\n ```\n $ holidata --year=2022 --country=BE --lang=fr\n ```\n \n* Create holiday data of year `2022` for country `US` (default language `en`):\n ```\n $ holidata --year=2022 --country=US\n ```\n\n## Data\n\nFor each holiday the following data is provided:\n* `locale` - language and country the holiday is defined for\n* `region` - region code of the given subdivision the holiday is defined for\n* `date` - actual date the holiday takes place\n* `description` - name of the holiday in the given language\n* `type` - holiday type flags\n* `notes` - additional information\n\n## Locales\n\nHolidata provides holiday data in different locales, i.e. 
for a given country and in a given language.\nSee [holidata.net](https://holidata.net/locales/) for a complete overview of the currently provided locales.\n\nIf you think a locale is missing, [open a feature request on GitHub](https://github.com/GothenburgBitFactory/holidata/issues).\n\n## Output Formats\n\nHolidata supports different output formats, currently `csv`, `json`, `yaml`, and `xml`.\n\nIf you think an output format is missing, [open a feature request on GitHub](https://github.com/GothenburgBitFactory/holidata/issues).\n\n## Limitations\n\nHolidata focuses on holidays which are _defined by law on which business or work are suspended or reduced_ (there may be some exceptions to that rule).\n\nHolidata only provides data for countries and their principal subdivisions (both as they are defined in ISO 3166).\nHolidays for other subdivisions are either merged or ignored.\nThere is also no explicit representation of partial holidays.\n\n## License\n\n`holidata` is released under the MIT license.\nFor details check the [LICENSE](LICENSE) file.\n" }, { "alpha_fraction": 0.5275128483772278, "alphanum_fraction": 0.5308144092559814, "avg_line_length": 28.31182861328125, "blob_id": "8356096df62f302df51a4cfc38c374b20822db93", "content_id": "afce626f5b0175faa3a5c5a2add25a499dd15b81", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2726, "license_type": "permissive", "max_line_length": 142, "num_lines": 93, "path": "/src/holidata/emitters.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "import csv\nimport io\nimport json\n\nfrom holidata.plugin import PluginMount\n\nclass Emitter(object, metaclass=PluginMount):\n type = None\n\n def __init__(self):\n if self.type is None:\n raise ValueError(\"Emitter {0} does not provide its type\".format(self.__class__.__name__))\n\n @staticmethod\n def get(identifier):\n return Emitter.get_plugin(identifier, \"type\")\n\n def output(self, locale):\n pass\n\n\nclass JsonEmitter(Emitter):\n type = \"json\"\n\n def output(self, locale):\n export_data = [h.as_dict() for h in locale.holidays]\n export_data.sort(key=lambda x: x[\"date\"])\n return \"\\n\".join([json.dumps(h, ensure_ascii=False, sort_keys=False, indent=None, separators=(\",\", \":\")) for h in export_data]) + \"\\n\"\n\n\nclass CsvEmitter(Emitter):\n type = \"csv\"\n\n def output(self, locale):\n export_data = [h.as_dict() for h in locale.holidays]\n export_data.sort(key=lambda x: x[\"date\"])\n result = io.StringIO()\n\n writer = csv.DictWriter(result,\n [\"locale\", \"region\", \"date\", \"description\", \"type\", \"notes\"],\n quoting=csv.QUOTE_ALL,\n lineterminator=\"\\n\")\n writer.writeheader()\n writer.writerows(export_data)\n\n return result.getvalue()\n\n\nclass YamlEmitter(Emitter):\n type = \"yaml\"\n\n def output(self, locale):\n export_data = [h.as_dict() for h in locale.holidays]\n export_data.sort(key=lambda x: x[\"date\"])\n\n output = \"%YAML 1.1\\n\"\n output += \"---\\n\"\n for holiday in export_data:\n output += \" holiday:\\n\"\n\n for key in [\"locale\", \"region\", \"date\", \"description\", \"type\", \"notes\"]:\n value = holiday[key]\n\n if value is not None and value != \"\":\n output += \" {}: {}\\n\".format(key, value)\n else:\n output += \" {}:\\n\".format(key)\n\n output += \"...\\n\"\n return output\n\n\nclass XmlEmitter(Emitter):\n type = \"xml\"\n\n def output(self, locale):\n export_data = [h.as_dict() for h in locale.holidays]\n export_data.sort(key=lambda x: 
x[\"date\"])\n\n output = \"<?xml version=\\\"1.0\\\" encoding=\\\"UTF-8\\\" ?>\\n\"\n output += \"<holidays>\\n\"\n\n for holiday in export_data:\n output += \" <holiday>\\n\"\n\n for key in [\"locale\", \"region\", \"date\", \"description\", \"type\", \"notes\"]:\n value = holiday[key] if key in holiday else \"\"\n output += \" <{0}>{1}</{0}>\\n\".format(key, value if value is not None else \"\")\n\n output += \" </holiday>\\n\"\n\n output += \"</holidays>\\n\"\n return output\n" }, { "alpha_fraction": 0.31640625, "alphanum_fraction": 0.3615451455116272, "avg_line_length": 20.745283126831055, "blob_id": "1e7289a239f5d7fb13bbb4e898caeab38dace24f", "content_id": "e5f212bcc724d2607e467386a3e3ff7515e71461", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2304, "license_type": "permissive", "max_line_length": 51, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-02',\n 'description': \"Worker's Day (Supplement)\",\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "e49156e9192ec435d142bc26c9f470246129f732", "content_id": "8ff30f62a65ebe5640158d4df255e155e42c522f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 
'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-04-01',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-22',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-13',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-23',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-24',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-02',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "aeab9e627070f752ca4290e4d3c57c1a72b8f57a", "content_id": "fe19a800e7b6a8f0b02dee15d82fd961770b371d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Ülestõusmispühade 1. 
püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Nelipühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31302428245544434, "alphanum_fraction": 0.35894039273262024, "avg_line_length": 20.37735939025879, "blob_id": "7c8034324b4fd481ee06f81bcf3fe92c681da221", "content_id": "37a04a8c49d2940fdcbeb6a0d617ab314b6cb730", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2273, "license_type": "permissive", "max_line_length": 47, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Trettondedagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Långfredagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Påskdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Annandag påsk',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Kristi himmelfärdsdag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Pingst',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-21',\n 'description': 'Midsommardagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-12-06',\n 'description': 'Självständighetsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { 
"alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "e17972ad909d3d20a0aaa6e61f2800c064ff8b35", "content_id": "428e382b5904e39aebdc9cbc5f0a79ce33bd6871", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-05',\n 'description': 'Nelipühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3224222660064697, "alphanum_fraction": 0.3669394552707672, "avg_line_length": 21.382783889770508, "blob_id": "180093d18b7d1b700dfb844881359c350cbd6369", "content_id": "50d618ca311904aa2171c344171f90ca45f35770", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12260, "license_type": "permissive", "max_line_length": 68, "num_lines": 546, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_ES-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-01-02',\n 'description': 'Lunes siguiente al Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2017-01-02',\n 'description': 'Lunes siguiente al Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'F'\n },\n {\n 'date': '2017-01-02',\n 'description': 'Lunes siguiente al Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2017-01-02',\n 'description': 'Lunes siguiente al Año Nuevo',\n 
'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2017-01-02',\n 'description': 'Lunes siguiente al Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'F'\n },\n {\n 'date': '2017-01-02',\n 'description': 'Lunes siguiente al Año Nuevo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'F'\n },\n {\n 'date': '2017-01-06',\n 'description': 'Epifanía del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-02-28',\n 'description': 'Día de Andalucía',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'F'\n },\n {\n 'date': '2017-03-01',\n 'description': 'Día de las Illes Balears',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'F'\n },\n {\n 'date': '2017-03-20',\n 'description': 'Lunes siguiente a San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RF'\n },\n {\n 'date': '2017-03-20',\n 'description': 'Lunes siguiente a San José',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RF'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AN',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Jueves Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-14',\n 
'description': 'Viernes Santo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'IB',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Lunes de Pascua',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'RV'\n },\n {\n 'date': '2017-04-24',\n 'description': 'Fiesta de Castilla y León',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CL',\n 'type': 'F'\n },\n {\n 'date': '2017-04-24',\n 'description': 'San Jorge / Día de Aragón',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AR',\n 'type': 'RF'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Fiesta del Trabajo',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-02',\n 'description': 'Fiesta de la Comunidad de Madrid',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MD',\n 'type': 'F'\n },\n {\n 'date': '2017-05-17',\n 'description': 'Día de las Letras Gallegas',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'F'\n },\n {\n 'date': '2017-05-30',\n 'description': 'Día de Canarias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CN',\n 'type': 'F'\n },\n {\n 'date': '2017-05-31',\n 'description': 'Día de Castilla-La Mancha',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'F'\n },\n {\n 'date': '2017-06-09',\n 'description': 'Día de la Región de Murcia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'MC',\n 'type': 'F'\n },\n {\n 'date': '2017-06-09',\n 'description': 'Día de La Rioja',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'RI',\n 'type': 'F'\n },\n {\n 'date': '2017-06-15',\n 'description': 'Corpus Christi',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CM',\n 'type': 'RV'\n },\n {\n 'date': '2017-06-24',\n 'description': 'San Juan',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n },\n {\n 'date': '2017-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'NC',\n 'type': 'RF'\n },\n {\n 'date': '2017-07-25',\n 'description': 'Santiago Apóstol',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'PV',\n 'type': 'RF'\n },\n {\n 'date': '2017-07-25',\n 'description': 'Santiago Apóstol / Día Nacional de Galicia',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'GA',\n 'type': 'RF'\n },\n {\n 'date': '2017-07-28',\n 'description': 'Día de las Instituciones de Cantabria',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'F'\n },\n {\n 'date': '2017-08-15',\n 'description': 'Asunción de la Virgen',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-09-01',\n 'description': 'Fiesta del Sacrificio (Aid El Kebir)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'ML',\n 'type': 'RV'\n },\n {\n 'date': 
'2017-09-01',\n 'description': 'Fiesta del Sacrificio (Eidul Adha)',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'RV'\n },\n {\n 'date': '2017-09-02',\n 'description': 'Día de Ceuta',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CE',\n 'type': 'F'\n },\n {\n 'date': '2017-09-08',\n 'description': 'Día de Asturias',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'AS',\n 'type': 'F'\n },\n {\n 'date': '2017-09-08',\n 'description': 'Día de Extremadura',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'EX',\n 'type': 'F'\n },\n {\n 'date': '2017-09-11',\n 'description': 'Fiesta Nacional de Cataluña',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'F'\n },\n {\n 'date': '2017-09-15',\n 'description': 'La Bien Aparecida',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CB',\n 'type': 'RF'\n },\n {\n 'date': '2017-10-09',\n 'description': 'Día de la Comunitat Valenciana',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'VC',\n 'type': 'F'\n },\n {\n 'date': '2017-10-12',\n 'description': 'Fiesta Nacional de España',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-11-01',\n 'description': 'Todos los Santos',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-06',\n 'description': 'Día de la Constitución Española',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-08',\n 'description': 'Inmaculada Concepción',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Natividad del Señor',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'San Esteban',\n 'locale': 'es-ES',\n 'notes': '',\n 'region': 'CT',\n 'type': 'RF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "4d27d0c465cfad8844665f5317af985ffbd5d7be", "content_id": "824b3fb4579151b667187a048c5e1a867cd1be46", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-02-10',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-24',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-28',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': 
'2013-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-09',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-20',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "37fc63baa6363ab1d9774ebfd2bde047eab5fd54", "content_id": "7ec66806c6be9c735e91b5a5593a23ef0ee14db3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-02-27',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-04-13',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-14',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-15',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-04-16',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-06-04',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-25',\n 'description': 'Χριστούγεννα',\n 
'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "bd5b4a93db5a959980ea42977c6d8b82a956d932", "content_id": "84e89130536139f26b20c5603aa0af7ca1e96c18", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-03-03',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-14',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-18',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-22',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-05-30',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-10',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2019-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.34026622772216797, "alphanum_fraction": 0.3843593895435333, "avg_line_length": 21.688678741455078, "blob_id": "12363b6e7be1191875bbe45ee5944b488d2be006", "content_id": "96397012bf8f1dde4ce1460e68a086248169cf26", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2443, "license_type": "permissive", "max_line_length": 72, "num_lines": 106, "path": 
"/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2012] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2012-01-01',\n 'description': 'Nový rok',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-01-01',\n 'description': 'Den obnovy samostatného českého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-04-09',\n 'description': 'Velikonoční pondělí',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2012-05-01',\n 'description': 'Svátek práce',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-05-08',\n 'description': 'Den vítězství',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-07-05',\n 'description': 'Den slovanských věrozvěstů Cyrila a Metoděje',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-07-06',\n 'description': 'Den upálení mistra Jana Husa',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-09-28',\n 'description': 'Den české státnosti',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-10-28',\n 'description': 'Den vzniku samostatného československého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-11-17',\n 'description': 'Den boje za svobodu a demokracii',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2012-12-24',\n 'description': 'Štědrý den',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-25',\n 'description': '1. svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2012-12-26',\n 'description': '2. 
svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3143772482872009, "alphanum_fraction": 0.36884260177612305, "avg_line_length": 20.912281036376953, "blob_id": "2465bb5f692bdc70cd3a7405d3b0b5fdba836631", "content_id": "4ec5a54ab7b5937968642a6d79fb8b2fc971b092", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2522, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-14',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2016-03-05 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-16',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-10-31',\n 'description': 'Munkaszüneti nap',\n 'locale': 'hu-HU',\n 'notes': '2016-10-15 munkanap',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5154223442077637, "alphanum_fraction": 0.5169996619224548, "avg_line_length": 36.78807830810547, "blob_id": "830299a21847723310a876a8339ea38fdc96f8b1", "content_id": "da3081f0acbcfab8aea48ca2e9c5ba602d79e357", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5706, "license_type": "permissive", "max_line_length": 134, "num_lines": 151, "path": "/src/holidata/holidays/holidays.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "import re\n\nfrom holidata.plugin import PluginMount\nfrom holidata.utils import SmartDayArrow, month_reference, easter\n\n\nclass Holiday(object):\n \"\"\"\n A sheer container for one holiday.\n \"\"\"\n def __init__(self, locale, region, date, description, flags=\"\", notes=\"\"):\n self.locale = locale\n self.region = region\n self.date = 
date\n self.description = description\n self.flags = flags\n self.notes = notes\n\n def as_dict(self):\n return {\n \"locale\": self.locale,\n \"region\": self.region,\n \"date\": self.date.strftime(\"%Y-%m-%d\"),\n \"description\": self.description,\n \"type\": self.flags,\n \"notes\": self.notes\n }\n\n\nclass Country(object, metaclass=PluginMount):\n \"\"\"\n Represents holidays of a given country\n \"\"\"\n id = None\n languages = []\n default_lang = None\n easter_type = None\n\n def __init__(self):\n if self.id is None:\n raise ValueError(\"Country '{0}' does not provide its id!\".format(self.__class__.__name__))\n\n if not self.languages:\n raise ValueError(\"Country '{0}' does not list languages!\".format(self.__class__.__name__))\n\n if self.default_lang is not None and self.default_lang not in self.languages:\n raise ValueError(\"Country '{0}' does not list language '{1}'!\".format(self.__class__.__name__, self.default_lang))\n @staticmethod\n def get(identifier):\n return Country.get_plugin(identifier, \"id\")\n\n\nclass Locale(object, metaclass=PluginMount):\n \"\"\"\n Represents holidays in a given locale.\n \"\"\"\n locale = None\n easter_type = None\n\n fixed_regex = re.compile(r\"^\\s*(?P<month>\\d\\d)-(?P<day>\\d\\d): \"\n r\"(\\[(?P<regions>[^]]+)\\]\\s+)?\"\n r\"\\[(?P<flags>[A-Z]*)\\] (?P<description>.*)$\", re.UNICODE)\n nth_weekday_regex = re.compile(r\"^\\s*(?P<order>\\d+)\\.(?P<last> last | )\"\n r\"(?P<weekday>[a-z]+) in (?P<month>[a-zA-Z]+):\\s+\"\n r\"(\\[(?P<regions>[^]]+)\\]\\s+)?\"\n r\"\\[(?P<flags>[A-Z]*)\\] (?P<description>.*)$\", re.UNICODE)\n easter_shift_regex = re.compile(r\"^\\s*((?P<days>\\d+) day(s)? (?P<direction>(before|after)) )?Easter:\\s+\"\n r\"(\\[(?P<regions>[^]]+)\\]\\s+)?\"\n r\"\\[(?P<flags>[A-Z]*)\\] (?P<description>.*)$\", re.UNICODE)\n\n def __init__(self, year):\n if self.locale is None:\n raise ValueError(\"Locale {0} does not provide its locale\".format(self.__class__.__name__))\n\n self.year = year\n\n @staticmethod\n def get(identifier):\n return Locale.get_plugin(identifier, \"locale\")\n\n @property\n def holidays(self):\n \"\"\"\n Yield all the Holiday objects corresponding to the definitions in the\n self.__doc__ and also as given by the dynamic self.holiday_* methods.\n \"\"\"\n # First process lines in the __doc__\n for line in self.__doc__.splitlines():\n # Skip empty lines\n if not line.strip():\n continue\n\n holidata = self._parse_holidata(line)\n\n if holidata is None:\n print(\"Following line could not be processed: '{}'\".format(line))\n continue\n\n for region in holidata[\"regions\"]:\n yield Holiday(\n locale=self.locale,\n region=region,\n date=holidata[\"date\"],\n description=holidata[\"description\"],\n flags=holidata[\"flags\"],\n notes=holidata[\"notes\"],\n )\n\n # Second, call holiday functions\n for method in [getattr(self, func) for func in dir(self) if func.startswith(\"holiday_\")]:\n holidays = method()\n\n for holiday in holidays:\n yield holiday\n\n def _parse_holidata(self, line):\n function_map = [\n (self.fixed_regex, self._date_from_fixed_reference),\n (self.nth_weekday_regex, self._date_from_weekday_reference),\n (self.easter_shift_regex, self._date_from_easter_reference),\n ]\n\n for reg_exp, create_date_from in function_map:\n m = reg_exp.search(line)\n if m is not None:\n return dict(regions=([x.strip() for x in m.group(\"regions\").split(\",\")] if m.group(\"regions\") is not None else [\"\"]),\n date=create_date_from(m),\n description=m.group(\"description\"),\n flags=m.group(\"flags\"),\n 
notes=\"\")\n\n return None\n\n def _date_from_fixed_reference(self, m):\n return SmartDayArrow(self.year, int(m.group(\"month\")), int(m.group(\"day\")))\n\n def _date_from_weekday_reference(self, m):\n return month_reference(self.year,\n m.group(\"month\"),\n first=m.group(\"last\").strip() == \"\") \\\n .shift_to_weekday(m.group(\"weekday\"),\n order=int(m.group(\"order\")),\n reverse=m.group(\"last\").strip() == \"last\",\n including=True)\n\n def _date_from_easter_reference(self, m):\n if self.easter_type is None:\n raise ValueError(\"Locale {0} does not provide its easter type (WESTERN|ORTHODOX)\".format(self.__class__.__name__))\n\n return easter(self.year, self.easter_type) \\\n .shift(days=int((m.group(\"days\")) if m.group(\"days\") is not None else 0) * (1 if m.group(\"direction\") == \"after\" else -1))\n" }, { "alpha_fraction": 0.6760858297348022, "alphanum_fraction": 0.6760858297348022, "avg_line_length": 29.822580337524414, "blob_id": "f8b89f1d7178ae0010e1e03f02500171d2d7e346", "content_id": "7dc2cbc7a749bff406debd9c21c9d410198e0028", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1911, "license_type": "permissive", "max_line_length": 157, "num_lines": 62, "path": "/src/holidata/__init__.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import *\nfrom .emitters import Emitter\n\ndef get_country_for(identifier):\n country_class = Country.get(identifier)\n\n if not country_class:\n raise ValueError(\"No plugin found for country id '{}'!\".format(identifier))\n\n return country_class()\n\n\ndef get_locale_class_for(identifier):\n locale_class = Locale.get(identifier)\n\n if not locale_class:\n raise ValueError(\"No plugin found for locale: {}!\".format(identifier))\n\n return locale_class\n\n\ndef create_locale_for(country_id=None, lang_id=None, year=None):\n country_class = get_country_for(country_id)\n\n if lang_id is not None and lang_id.lower() not in country_class.languages:\n raise ValueError(\"Language '{}' is not defined for country '{}'!\".format(lang_id, country_class.id))\n elif lang_id is None and country_class.default_lang is not None:\n lang_id = country_class.default_lang\n elif lang_id is None:\n raise ValueError(\"Country '{}' has no default language specified! Please choose one of [{}].\".format(country_id, \", \".join(country_class.languages)))\n\n locale_class = get_locale_class_for(\"{}-{}\".format(lang_id, country_id))\n\n return locale_class(year)\n\n\ndef create_emitter_for(identifier):\n emitter_class = Emitter.get(identifier)\n\n if not emitter_class:\n raise ValueError(\"Unsupported output format: '{}'!\".format(identifier))\n\n return emitter_class()\n\n\ndef parse_year(year):\n try:\n return int(year)\n except:\n raise ValueError(f\"Invalid year '{year}'! 
Has to be an integer.\")\n\n\nclass Holidata:\n locale = None\n emitter = None\n\n def __init__(self, country=None, language=None, year=None, output=None):\n self.locale = create_locale_for(country_id=country, lang_id=language, year=parse_year(year))\n self.emitter = create_emitter_for(output)\n\n def __str__(self):\n return self.emitter.output(self.locale)\n" }, { "alpha_fraction": 0.3165436387062073, "alphanum_fraction": 0.3617021143436432, "avg_line_length": 20.735849380493164, "blob_id": "7f87a9395064bd087cbcafa069c3b5506ba55d70", "content_id": "5f6abd365c721ddd3c3400089aa32446543b43eb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2303, "license_type": "permissive", "max_line_length": 50, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_ZA-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-03-21',\n 'description': 'Human Rights Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Good Friday',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Family Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-27',\n 'description': 'Freedom Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-28',\n 'description': 'Freedom Day (Supplement)',\n 'locale': 'en-ZA',\n 'notes': 'Supplement holiday',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-01',\n 'description': \"Worker's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-16',\n 'description': 'Youth Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-08-09',\n 'description': \"National Women's Day\",\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-09-24',\n 'description': 'Heritage Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-16',\n 'description': 'Day of Reconciliation',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Day of Goodwill',\n 'locale': 'en-ZA',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3191857933998108, "alphanum_fraction": 0.36422693729400635, "avg_line_length": 20.79245376586914, "blob_id": "e7a8e058ddf6739eaaf60e01151473d15991473e", "content_id": "2eba4ed7622909e8683b6fa966adb791221f8aa6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2315, "license_type": "permissive", "max_line_length": 89, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Nova Godina',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': 
'2015-01-06',\n 'description': 'Sveta tri kralja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Uskrs',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Uskršnji ponedjeljak',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Praznik rada',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-30',\n 'description': 'Dan državnosti',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-06-04',\n 'description': 'Tijelovo',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-22',\n 'description': 'Dan antifašističke borbe',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-05',\n 'description': 'Dan pobjede i domovinske zahvalnosti i Dan hrvatskih branitelja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-15',\n 'description': 'Velika Gospa',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Dan svih svetih',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Božić',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Sveti Stjepan',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "a3762ef2f20f6f55634a47da6667d9fb87a9b00e", "content_id": "b84752fe702afd95e278febd0a59f50b0d85fd20", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/PL.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass PL(Country):\n id = \"PL\"\n languages = [\"pl\"]\n default_lang = \"pl\"\n" }, { "alpha_fraction": 0.31993502378463745, "alphanum_fraction": 0.3654080331325531, "avg_line_length": 20.61403465270996, "blob_id": "771ef0613d130117e5ad7981f01b3405b707a549", "content_id": "01c4baa25d7b1efc841d3d443e86c7a675e5dd8a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2463, "license_type": "permissive", "max_line_length": 44, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_NL-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Nieuwjaarsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Goede Vrijdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Eerste Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Tweede Paasdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-26',\n 'description': 'Koningsdag',\n 'locale': 'nl-NL',\n 
'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-05-04',\n 'description': 'Dodenherdenking',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-05',\n 'description': 'Bevrijdingsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Hemelvaartsdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Eerste Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Tweede Pinksterdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-12-05',\n 'description': 'Sinterklaas',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-15',\n 'description': 'Koninkrijksdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Eerste Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Tweede Kerstdag',\n 'locale': 'nl-NL',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.30328208208084106, "alphanum_fraction": 0.35189032554626465, "avg_line_length": 20.305309295654297, "blob_id": "fb6ddd04000cf1b3b406ae03b9b1f9baf96d3d9f", "content_id": "3c213e5eff7b27af557259b139397be66a889f37", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4817, "license_type": "permissive", "max_line_length": 49, "num_lines": 226, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_AT-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-06',\n 'description': 'Heilige drei Könige',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'RF'\n },\n {\n 'date': '2015-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '6',\n 'type': 'RF'\n },\n {\n 'date': '2015-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '7',\n 'type': 'RF'\n },\n {\n 'date': '2015-03-19',\n 'description': 'Josef',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '8',\n 'type': 'RF'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Karfreitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Ostersonntag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Ostermontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Staatsfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-04',\n 'description': 'Florian',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '4',\n 'type': 'F'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Pfingstsonntag',\n 
'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Pfingstmontag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-04',\n 'description': 'Fronleichnam',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-09-24',\n 'description': 'Rupert',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '5',\n 'type': 'F'\n },\n {\n 'date': '2015-10-10',\n 'description': 'Tag der Volksabstimmung',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '2',\n 'type': 'F'\n },\n {\n 'date': '2015-10-26',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-11-11',\n 'description': 'Martin',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '1',\n 'type': 'F'\n },\n {\n 'date': '2015-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '9',\n 'type': 'F'\n },\n {\n 'date': '2015-11-15',\n 'description': 'Leopold',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '3',\n 'type': 'F'\n },\n {\n 'date': '2015-12-08',\n 'description': 'Mariä Empfängnis',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Heiliger Abend',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Christtag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Stefanitag',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-12-31',\n 'description': 'Silvester',\n 'locale': 'de-AT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5472487807273865, "alphanum_fraction": 0.5789473652839661, "avg_line_length": 27.3389835357666, "blob_id": "13964125a1df8dd145416bfd919f6bc9373e5cf4", "content_id": "d5f87875d99c7b81b3e46c2adf336af36d532096", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1672, "license_type": "permissive", "max_line_length": 92, "num_lines": 59, "path": "/src/holidata/holidays/nl-NL.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Holiday, Locale\n\n\nclass nl_NL(Locale):\n \"\"\"\n 01-01: [NF] Nieuwjaarsdag\n 05-04: [NF] Dodenherdenking\n 05-05: [NF] Bevrijdingsdag\n 12-05: [NRF] Sinterklaas\n 12-25: [NRF] Eerste Kerstdag\n 12-26: [NRF] Tweede Kerstdag\n 2 days before Easter: [NRV] Goede Vrijdag\n Easter: [NRV] Eerste Paasdag\n 1 day after Easter: [NRV] Tweede Paasdag\n 39 days after Easter: [NRV] Hemelvaartsdag\n 49 days after Easter: [NRV] Eerste Pinksterdag\n 50 days after Easter: [NRV] Tweede Pinksterdag\n \"\"\"\n\n locale = \"nl-NL\"\n easter_type = EASTER_WESTERN\n\n def holiday_koningsdag(self):\n \"\"\"04-27 or saturday before if it falls on sunday: [NF] Koninginnedag/Koningsdag \"\"\"\n if self.year < 2014:\n date = SmartDayArrow(self.year, 4, 30)\n description = \"Koninginnedag\"\n else:\n date = 
SmartDayArrow(self.year, 4, 27)\n description = \"Koningsdag\"\n\n if date.weekday() == \"sunday\":\n date = date.shift(days=-1)\n\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date,\n description=description,\n flags=\"NV\",\n notes=\"\")]\n\n def holiday_koninkrijksdag(self):\n date = SmartDayArrow(self.year, 12, 15)\n\n if date.weekday() == \"sunday\":\n date = date.shift(days=1)\n\n return [Holiday(\n locale=self.locale,\n region=\"\",\n date=date,\n description=\"Koninkrijksdag\",\n flags=\"NV\",\n notes=\"\")]\n" }, { "alpha_fraction": 0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "d063696947d86981e97fee25ef3efc6f57a05997", "content_id": "064f55b2f3e50e025c5cbea7d10daf042f98e092", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-02-16',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Sexta-feira Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-04',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-06-03',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Natal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3200301229953766, "alphanum_fraction": 0.3652108311653137, "avg_line_length": 20.778688430786133, "blob_id": "8e9342f167be894f09d6f927568692c8a0b6f340", "content_id": "94aa58b07adf3f0066c3cb613a502603fff22a25", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2663, "license_type": "permissive", "max_line_length": 56, "num_lines": 122, "path": 
"/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-02',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-02-08',\n 'description': 'Prešernov dan',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3200301229953766, "alphanum_fraction": 0.3652108311653137, "avg_line_length": 20.778688430786133, "blob_id": "c7e131b6d494967406ca743a3cd8e512d2ffba11", "content_id": "5edf38f2a7599a96ce8c0495bf39ab2e35202c71", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2663, "license_type": "permissive", "max_line_length": 56, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sl_SI-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-02',\n 'description': 'Novo leto',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-02-08',\n 'description': 'Prešernov dan',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Velikonočna nedelja',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Velikonočni ponedeljek',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n 
{\n 'date': '2022-04-27',\n 'description': 'Dan upora proti okupatorju',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-02',\n 'description': 'Praznik dela',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-06',\n 'description': 'Binkošti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-25',\n 'description': 'Dan državnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Marijino vnebovzetje',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-10-31',\n 'description': 'Dan reformacije',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Dan spomina na mrtve',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Božič',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Dan samostojnosti in enotnosti',\n 'locale': 'sl-SI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "12260974977ad22ebf305945bfe53f7fab9f925f", "content_id": "d4bd5ba79f6a53280bbbc69d87aafc65f5f6a58e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-02-23',\n 'description': 'День защитника Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-04-12',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "b417ad2414d5a2d7136bda2d2a84e6f40078b02b", "content_id": "fdbea9a8bbcd9a5ce873751220ee4f3893051504", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-02-26',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-09',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-13',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-14',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-25',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5957446694374084, "alphanum_fraction": 0.5957446694374084, "avg_line_length": 14.666666984558105, "blob_id": "3a38f1c78e6be7ad28cb5126c7ae2b90fac0fb89", "content_id": "66a1d9946b95c55a08b8366af1b11055dda3c61b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 94, "license_type": "permissive", "max_line_length": 29, "num_lines": 6, "path": "/src/holidata/holidays/FI.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass FI(Country):\n id = \"FI\"\n languages = [\"fi\", \"sv\"]\n" }, { "alpha_fraction": 0.32057416439056396, "alphanum_fraction": 0.3652312457561493, "avg_line_length": 21.008771896362305, "blob_id": "fc71ac7cac30f8abc7bbb6ead8f56a1ab99c9173", "content_id": "4e4f01db71f4aad95924d6594af01a75af5b5541", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 2524, "license_type": "permissive", "max_line_length": 58, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Año Neuvo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-01-16',\n 'description': 'Cumpleaños de Martin Luther King, Jr.',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-02-20',\n 'description': 'Día del Presidente',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2017-05-29',\n 'description': 'Día de los Caídos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-07-04',\n 'description': 'Día de la Independiencia',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-09-04',\n 'description': 'Día del Trabajo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-10-09',\n 'description': 'Día de Columbus',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-11-11',\n 'description': 'Día de los Veteranos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-11-23',\n 'description': 'Día de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-11-24',\n 'description': 'Día después de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2017-12-24',\n 'description': 'Nochebuena',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Navidad',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31116390228271484, "alphanum_fraction": 0.3605700731277466, "avg_line_length": 20.489795684814453, "blob_id": "a33599fa23e3e54ac175ea56a95f51dd7e1b5040", "content_id": "c30db00e199f24850293da4526dea0c5e06060e7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2128, "license_type": "permissive", "max_line_length": 54, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hu_HU-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Újév',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-03-15',\n 'description': 'Az 1848-as forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Húsvét',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Húsvéthétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'A munka ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 
'date': '2014-06-08',\n 'description': 'Pünkösd',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-09',\n 'description': 'Pünkösdhétfő',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-08-20',\n 'description': 'Az államalapítás ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-10-23',\n 'description': 'Az 1956-os forradalom ünnepe',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Mindenszentek',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Karácsony',\n 'locale': 'hu-HU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.47533631324768066, "alphanum_fraction": 0.47533631324768066, "avg_line_length": 26.875, "blob_id": "89c37c33d341ccc4f67d60e0301a0eccc1936399", "content_id": "3fbbc2483b1c3d4305ae7ec14ca3da53dfda3034", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 223, "license_type": "permissive", "max_line_length": 110, "num_lines": 8, "path": "/src/holidata/holidays/DE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass DE(Country):\n id = \"DE\"\n languages = [\"de\"]\n default_lang = \"de\"\n regions = [\"BB\", \"BE\", \"BH\", \"BW\", \"BY\", \"HE\", \"HH\", \"MV\", \"NI\", \"NW\", \"RP\", \"SH\", \"SL\", \"SN\", \"ST\", \"TH\"]\n" }, { "alpha_fraction": 0.329084575176239, "alphanum_fraction": 0.37504827976226807, "avg_line_length": 21.719297409057617, "blob_id": "80e0aab2b0e4ab7272e1e8a9333be7bc650985ab", "content_id": "c715f02384b0f60698c52236eea8b0d4eb873851", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2620, "license_type": "permissive", "max_line_length": 65, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[tr_TR-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Yılbaşı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-04-23',\n 'description': 'Ulusal Egemenlik ve Çocuk Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Emek ve Dayanışma Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-02',\n 'description': 'Ramazan Bayramı (1. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-03',\n 'description': 'Ramazan Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-04',\n 'description': 'Ramazan Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-19',\n 'description': \"Atatürk'ü Anma, Gençlik ve Spor Bayramı\",\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-07-09',\n 'description': 'Kurban Bayramı (1. 
Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-10',\n 'description': 'Kurban Bayramı (2. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-11',\n 'description': 'Kurban Bayramı (3. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-12',\n 'description': 'Kurban Bayramı (4. Gün)',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-07-15',\n 'description': 'Demokrasi ve Milli Birlik Günü',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-30',\n 'description': 'Zafer Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-10-29',\n 'description': 'Cumhuriyet Bayramı',\n 'locale': 'tr-TR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32159623503685, "alphanum_fraction": 0.36760562658309937, "avg_line_length": 20.744897842407227, "blob_id": "0cf1faf283ce28e13084d9d68a64976a9403e226", "content_id": "e0d86e9e85b9d4a2b183f03ff11a4ab9d0923ad8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 67, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[et_EE-2019] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2019-01-01',\n 'description': 'Uusaasta',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-02-24',\n 'description': 'Iseseisvuspäev, Eesti Vabariigi aastapäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-04-19',\n 'description': 'Suur reede',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-04-21',\n 'description': 'Ülestõusmispühade 1. püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-05-01',\n 'description': 'Kevadpüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-09',\n 'description': 'Nelipühade 1. 
püha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2019-06-23',\n 'description': 'Võidupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-06-24',\n 'description': 'Jaanipäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-08-20',\n 'description': 'Taasiseseisvumispäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-24',\n 'description': 'Jõululaupäev',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-25',\n 'description': 'Esimene jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2019-12-26',\n 'description': 'Teine jõulupüha',\n 'locale': 'et-EE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3346364200115204, "alphanum_fraction": 0.3784206509590149, "avg_line_length": 21.447368621826172, "blob_id": "af9d2069a47e53cb03fbe886e3deb19efbc9a6f9", "content_id": "e6a0060d1281074383d7e13ce473eab250083444", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2569, "license_type": "permissive", "max_line_length": 108, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[hr_HR-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Nova Godina',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-01-06',\n 'description': 'Sveta tri kralja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Uskrs',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-18',\n 'description': 'Uskršnji ponedjeljak',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Praznik rada',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-30',\n 'description': 'Dan državnosti',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Tijelovo',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-06-22',\n 'description': 'Dan antifašističke borbe',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-05',\n 'description': 'Dan pobjede i domovinske zahvalnosti i Dan hrvatskih branitelja',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Velika Gospa',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Dan svih svetih',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-11-18',\n 'description': 'Dan sjećanja na žrtve Domovinskog rata i Dan sjećanja na žrtvu Vukovara i Škabrnje',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Božić',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2022-12-26',\n 'description': 'Sveti Stjepan',\n 'locale': 'hr-HR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 
0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "9571dfe27707fe99656110d3611d048b815f4093", "content_id": "a307ca47ab0d05f8c9cd362024b8f9cd863c43b7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2022] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2022-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-03-01',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-15',\n 'description': 'Sexta-feira Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-17',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-06-16',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2022-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2022-12-25',\n 'description': 'Natal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "577472a938a7bb554ef3e20624f03c7fc21f1bfc", "content_id": "f0b190f61a9514702654891ed73692663113e41f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': 
'2017-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-25',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.32057416439056396, "alphanum_fraction": 0.3652312457561493, "avg_line_length": 21.008771896362305, "blob_id": "dd7b2d5c89c004d932045f09d381da9486a50741", "content_id": "faab41a45dd28a9ecd1f2755d4c239a5625dadb7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2524, "license_type": "permissive", "max_line_length": 58, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[es_US-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Año Neuvo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-16',\n 'description': 'Cumpleaños de Martin Luther King, Jr.',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-02-20',\n 'description': 'Día del Presidente',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-04-17',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'MA',\n 'type': 'V'\n },\n {\n 'date': '2023-04-17',\n 'description': 'Día del Patriota',\n 'locale': 'es-US',\n 'notes': '',\n 'region': 'ME',\n 'type': 'V'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Día de los Caídos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-07-04',\n 'description': 'Día de la Independiencia',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-09-04',\n 'description': 'Día del Trabajo',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-10-09',\n 'description': 'Día de Columbus',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-11-11',\n 'description': 'Día de los Veteranos',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-11-23',\n 'description': 'Día de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2023-11-24',\n 'description': 'Día después de Acción de Gracias',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': 
'2023-12-24',\n 'description': 'Nochebuena',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Navidad',\n 'locale': 'es-US',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.31302428245544434, "alphanum_fraction": 0.35894039273262024, "avg_line_length": 20.37735939025879, "blob_id": "93953322d4b906a9191a22b8143913c5401eee6a", "content_id": "53164cadc30efc5bccd34b327a354e44293fd2b2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2273, "license_type": "permissive", "max_line_length": 47, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_FI-2016] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2016-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-01-06',\n 'description': 'Trettondedagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-03-25',\n 'description': 'Långfredagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-27',\n 'description': 'Påskdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-03-28',\n 'description': 'Annandag påsk',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-05-05',\n 'description': 'Kristi himmelfärdsdag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-05-15',\n 'description': 'Pingst',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-06-25',\n 'description': 'Midsommardagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-11-05',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2016-12-06',\n 'description': 'Självständighetsdagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2016-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2016-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.5457170605659485, "alphanum_fraction": 0.5880654454231262, "avg_line_length": 27.46575355529785, "blob_id": "1670f70cc04324bb0f84fe846124e7fda3bae3c6", "content_id": "a1d3e3e83ba6e6a19895e41745e53e3c94e5d3a3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2085, "license_type": "permissive", "max_line_length": 133, "num_lines": 73, "path": "/src/holidata/holidays/sv-SE.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom holidata.utils import SmartDayArrow\nfrom .holidays import Locale, Holiday\n\n\"\"\"\nsource: https://www.riksdagen.se/sv/dokument-lagar/dokument/svensk-forfattningssamling/lag-1989253-om-allmanna-helgdagar_sfs-1989-253\nsource: 
https://www.riksdagen.se/sv/dokument-lagar/dokument/svensk-forfattningssamling/semesterlag-1977480_sfs-1977-480\n\"\"\"\n\n\nclass sv_SE(Locale):\n \"\"\"\n 01-01: [NF] Nyårsdagen\n 01-06: [NRF] Trettondedag jul\n 05-01: [NF] Första maj\n 06-06: [NF] Nationaldagen\n 12-24: [NRF] Julafton\n 12-25: [NRF] Juldagen\n 12-26: [NRF] Annandag jul\n 12-31: [NF] Nyårsafton\n 2 days before Easter: [NRV] Långfredagen\n Easter: [NRV] Påskdagen\n 1 day after Easter: [NRV] Annandag påsk\n 39 days after Easter: [NRV] Kristi himmelsfärdsdag\n 49 days after Easter: [NRV] Pingstdagen\n \"\"\"\n\n locale = \"sv-SE\"\n easter_type = EASTER_WESTERN\n\n def __midsommar(self):\n \"\"\"\n Find the Saturday between 20 and 26 June\n \"\"\"\n return SmartDayArrow(self.year, 6, 19).shift_to_weekday(\"saturday\", order=1, reverse=False)\n\n def holiday_midsommarafton(self):\n \"\"\"\n The day before midsommardagen: [NV] Midsommarafton\n \"\"\"\n return [Holiday(\n self.locale,\n \"\",\n self.__midsommar().shift(days=-1),\n \"Midsommarafton\",\n \"NV\"\n )]\n\n def holiday_midsommardagen(self):\n \"\"\"\n Saturday between 20 and 26 June: [NV] Midsommardagen\n \"\"\"\n return [Holiday(\n self.locale,\n \"\",\n self.__midsommar(),\n \"Midsommardagen\",\n \"NV\"\n )]\n\n def holiday_alla_helgons_dag(self):\n \"\"\"\n Saturday between 31 October and 6 November: [NRV] Alla helgons dag\n \"\"\"\n return [Holiday(\n self.locale,\n \"\",\n SmartDayArrow(self.year, 10, 30).shift_to_weekday(\"saturday\", order=1, reverse=False),\n \"Alla helgons dag\",\n \"NRV\"\n )]\n" }, { "alpha_fraction": 0.3192431628704071, "alphanum_fraction": 0.3643317222595215, "avg_line_length": 20.798246383666992, "blob_id": "85d2df5572ce86705bf517c3a9e6d6c5ede057c1", "content_id": "3dee39217b0e8a2691f239044d8c9f04cf486f25", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2495, "license_type": "permissive", "max_line_length": 54, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[pt_PT-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Ano Novo',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-02-13',\n 'description': 'Carnaval',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Sexta-feira Santa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Páscoa',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-25',\n 'description': 'Dia da Liberdade',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Dia do Trabalhador',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-31',\n 'description': 'Corpo de Deus',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-06-10',\n 'description': 'Dia de Portugal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-15',\n 'description': 'Assunção de Nossa Senhora',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-10-05',\n 'description': 'Implantação da República',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 
'date': '2018-11-01',\n 'description': 'Dia de Todos os Santos',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-01',\n 'description': 'Restauração da Independência',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-08',\n 'description': 'Imaculada Conceição',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Natal',\n 'locale': 'pt-PT',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.32209739089012146, "alphanum_fraction": 0.3670412003993988, "avg_line_length": 20.66216278076172, "blob_id": "2313264b74f8f033b235c38ddeb2162e5ca40f92", "content_id": "02bcf2a7709bd4c7dac66a535bb419d4f420999e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 52, "num_lines": 74, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[ru_RU-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Новый Год',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-01-07',\n 'description': 'Рождество Христово',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-02-23',\n 'description': 'День защитника Отечества',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-03-08',\n 'description': 'Международный женский день',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Пасха',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Праздник весны и труда',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-09',\n 'description': 'День Победы',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-06-12',\n 'description': 'День России',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-11-04',\n 'description': 'День народного единства',\n 'locale': 'ru-RU',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "1fec289d557d8f6e05a4fc1916a5e626a83d7a82", "content_id": "945b72be9e11d17004cd4ecb5cf29e13156d2795", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-02-23',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-05',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-09',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': 
'',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-01',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "6cb06b00072793464e14bd2d307fbbaede359362", "content_id": "b108d2d606dec28ebc4d2f097d0bd167d42010fc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2020] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2020-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-04-10',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-12',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-04-13',\n 'description': '2. 
pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-05-21',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-05-31',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-06-20',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-10-31',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2020-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2020-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2020-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3293954133987427, "alphanum_fraction": 0.37665045261383057, "avg_line_length": 21.146154403686523, "blob_id": "32ff59b59b37321f7d53339647973afecf58aeb6", "content_id": "18f47b30a29a7f5312d44f0c1039ba4b24762c46", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2900, "license_type": "permissive", "max_line_length": 49, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[is_IS-2015] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2015-01-01',\n 'description': 'Nýársdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-04-02',\n 'description': 'Skírdagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-03',\n 'description': 'Föstudagurinn langi',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-05',\n 'description': 'Páskadagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-06',\n 'description': 'Annar dagur páska',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-04-23',\n 'description': 'Sumardagurinn fyrsti',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-05-01',\n 'description': 'Verkalýðsdagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-05-14',\n 'description': 'Uppstigningardagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-24',\n 'description': 'Hvítasunnudagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-05-25',\n 'description': 'Annar dagur hvítasunnu',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2015-06-17',\n 'description': 'Þjóðhátíðardagurinn',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2015-08-03',\n 'description': 'Frídagur verslunarmanna',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2015-12-24',\n 'description': 'Aðfangadagur jóla',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 
13:00',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-25',\n 'description': 'Jóladagur',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-26',\n 'description': 'Annar dagur jóla',\n 'locale': 'is-IS',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2015-12-31',\n 'description': 'Gamlársdagur',\n 'locale': 'is-IS',\n 'notes': 'Holiday from 13:00',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.30519479513168335, "alphanum_fraction": 0.35304170846939087, "avg_line_length": 20.21014404296875, "blob_id": "d81fa642105722348522b639057daf1780c13d65", "content_id": "6cd6dcd3bc611642ccba95fbfa0c5120f6f72a96", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2934, "license_type": "permissive", "max_line_length": 46, "num_lines": 138, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nb_NO-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Nyttårsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-02-19',\n 'description': 'Fastelavn',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-02',\n 'description': 'Palmesøndag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-06',\n 'description': 'Skjærtorsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Langfredag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-09',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Påskedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Offentlig Høytidsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-08',\n 'description': 'Frigjøringsdag 1945',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-17',\n 'description': 'Grunnlovsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-18',\n 'description': 'Kristi Himmelfartsdag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-28',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-29',\n 'description': 'Pinsedag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-12-24',\n 'description': 'Julaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Juledag',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-31',\n 'description': 'Nyttårsaften',\n 'locale': 'nb-NO',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.5636998414993286, "alphanum_fraction": 0.6369982361793518, "avg_line_length": 22.875, "blob_id": "ee60e99a67969bd8cdfc38e1c3236b02dedc97b4", "content_id": 
"196d03729793ad64701f066353d53b4cdce7388c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 573, "license_type": "permissive", "max_line_length": 42, "num_lines": 24, "path": "/src/holidata/holidays/it-IT.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "# coding=utf-8\nfrom dateutil.easter import EASTER_WESTERN\n\nfrom .holidays import Locale\n\n\nclass it_IT(Locale):\n \"\"\"\n 01-01: [NF] Capodanno\n 01-06: [NRF] Epifania\n 04-25: [NF] Festa della liberazione\n 05-01: [NF] Festa del lavoro\n 06-02: [NF] Festa della repubblica\n 08-15: [NRF] Assunzione (ferragosto)\n 11-01: [NRF] Ognissanti\n 12-08: [NRF] Immacolata concezione\n 12-25: [NRF] Natale\n 12-26: [NRF] S.to Stefano\n Easter: [NRV] Pasqua\n 1 day after Easter: [NRV] Pasquetta\n \"\"\"\n\n locale = \"it-IT\"\n easter_type = EASTER_WESTERN\n" }, { "alpha_fraction": 0.3051830232143402, "alphanum_fraction": 0.35157665610313416, "avg_line_length": 20.230770111083984, "blob_id": "7640c5aa5f333e11091255a9f1f9c983f81e99f3", "content_id": "1da51df236416dd18ea126401386016f34392fa1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2764, "license_type": "permissive", "max_line_length": 42, "num_lines": 130, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sv_SE-2014] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2014-01-01',\n 'description': 'Nyårsdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-01-06',\n 'description': 'Trettondedag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-04-18',\n 'description': 'Långfredagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-20',\n 'description': 'Påskdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-04-21',\n 'description': 'Annandag påsk',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-05-01',\n 'description': 'Första maj',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-05-29',\n 'description': 'Kristi himmelsfärdsdag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-06',\n 'description': 'Nationaldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2014-06-08',\n 'description': 'Pingstdagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-06-20',\n 'description': 'Midsommarafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-06-21',\n 'description': 'Midsommardagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2014-11-01',\n 'description': 'Alla helgons dag',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2014-12-24',\n 'description': 'Julafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-25',\n 'description': 'Juldagen',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2014-12-26',\n 'description': 'Annandag jul',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n 
{\n 'date': '2014-12-31',\n 'description': 'Nyårsafton',\n 'locale': 'sv-SE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.3314873278141022, "alphanum_fraction": 0.37579113245010376, "avg_line_length": 21.1842098236084, "blob_id": "96d9d163b284e8083c8cea5cbc6533e4ca70e852", "content_id": "7680350c1f8b721db4cee942d00e913d1ee84744", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2768, "license_type": "permissive", "max_line_length": 93, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[el_GR-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Πρωτοχρονιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-06',\n 'description': 'Θεοφάνεια',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-02-27',\n 'description': 'Καθαρά Δευτέρα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-03-25',\n 'description': 'Ευαγγελισμός της Θεοτόκου και Εθνική Ημέρα Ανεξαρτησίας της Ελλάδας',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-04-14',\n 'description': 'Μεγάλη Παρασκευή',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-15',\n 'description': 'Μεγάλο Σάββατο',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-16',\n 'description': 'Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-17',\n 'description': 'Δευτέρα του Πάσχα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Πρωτομαγιά',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-06-05',\n 'description': 'Δευτέρα του Αγίου Πνεύματος',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-08-15',\n 'description': 'Κοίμηση της Θεοτόκου',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-10-28',\n 'description': 'Ημέρα του ΌΧΙ',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Χριστούγεννα',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Επόμενη ημέρα Χριστουγέννων',\n 'locale': 'el-GR',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3056092858314514, "alphanum_fraction": 0.3520309329032898, "avg_line_length": 20.112245559692383, "blob_id": "1fbd17d0e8938ffb2de8fd98180bc6221bf58243", "content_id": "255b7eeff77cd07f2ab742a580f7cfac0433b466", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2068, "license_type": "permissive", "max_line_length": 42, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_BE-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': 'Neujahr',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-04-01',\n 'description': 'Ostern',\n 'locale': 'de-BE',\n 
'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Ostermontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-01',\n 'description': 'Tag der Arbeit',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-05-10',\n 'description': 'Christi Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-20',\n 'description': 'Pfingsten',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-05-21',\n 'description': 'Pfingstmontag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-07-21',\n 'description': 'Nationalfeiertag',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-08-15',\n 'description': 'Mariä Himmelfahrt',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-11-11',\n 'description': 'Waffenstillstand',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Weihnacht',\n 'locale': 'de-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3191489279270172, "alphanum_fraction": 0.3645389974117279, "avg_line_length": 20.59183692932129, "blob_id": "abe019b8fc28fb1e29e7eeb7dcfe70e21033d127", "content_id": "f297f8e8a827bce746839d029050879f6ed7032d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2115, "license_type": "permissive", "max_line_length": 53, "num_lines": 98, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[nl_BE-2017] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2017-01-01',\n 'description': 'Nieuwjaar',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-04-16',\n 'description': 'Pasen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-04-17',\n 'description': 'Paasmaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-05-01',\n 'description': 'Dag van de arbeid',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-05-25',\n 'description': 'Onze Lieve Heer hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-04',\n 'description': 'Pinksteren',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-06-05',\n 'description': 'Pinkstermaandag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2017-07-21',\n 'description': 'Nationale feestdag',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2017-08-15',\n 'description': 'Onze Lieve Vrouw hemelvaart',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-11-01',\n 'description': 'Allerheiligen',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2017-11-11',\n 'description': 'Wapenstilstand',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': 
'2017-12-25',\n 'description': 'Kerstmis',\n 'locale': 'nl-BE',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.34407365322113037, "alphanum_fraction": 0.3878020644187927, "avg_line_length": 21.877193450927734, "blob_id": "c1238de3c7866be9724948d924f85cd246aed502", "content_id": "3a1ea099b286a881fc5f5917fcbcec16f0160022", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2650, "license_type": "permissive", "max_line_length": 88, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[cs_CZ-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': 'Nový rok',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-01',\n 'description': 'Den obnovy samostatného českého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Velký pátek',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Velikonoční pondělí',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-05-01',\n 'description': 'Svátek práce',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-05-08',\n 'description': 'Den vítězství',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-07-05',\n 'description': 'Den slovanských věrozvěstů Cyrila a Metoděje',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-07-06',\n 'description': 'Den upálení mistra Jana Husa',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-09-28',\n 'description': 'Den české státnosti',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-10-28',\n 'description': 'Den vzniku samostatného československého státu',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-11-17',\n 'description': 'Den boje za svobodu a demokracii a Mezinárodní den studentstva',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-24',\n 'description': 'Štědrý den',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-25',\n 'description': '1. svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': '2. 
svátek vánoční',\n 'locale': 'cs-CZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3138362169265747, "alphanum_fraction": 0.35901573300361633, "avg_line_length": 20.75438690185547, "blob_id": "68517f77adb2be9aaefe4a74c03db4f228bc05bd", "content_id": "9882ea16c406de8fa298fc88777a81b3a0571f69", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2479, "license_type": "permissive", "max_line_length": 50, "num_lines": 114, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2021] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2021-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-01-04',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-02-08',\n 'description': 'Waitangi Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-04-02',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-05',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2021-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-04-26',\n 'description': 'ANZAC Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-06-07',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-10-25',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2021-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2021-12-27',\n 'description': 'Christmas Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2021-12-28',\n 'description': 'Boxing Day (observed)',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.5982142686843872, "alphanum_fraction": 0.5982142686843872, "avg_line_length": 15, "blob_id": "d86521b49c400a2ff8314fac706a68cd1b1792b0", "content_id": "2b1d87c96ed87adee52b5b9afbe2a8affe9c90fd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 112, "license_type": "permissive", "max_line_length": 29, "num_lines": 7, "path": "/src/holidata/holidays/TR.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "from .holidays import Country\n\n\nclass TR(Country):\n id = \"TR\"\n languages = [\"tr\"]\n default_lang = \"tr\"\n" }, { "alpha_fraction": 0.3381712734699249, "alphanum_fraction": 0.3817126154899597, "avg_line_length": 21.598360061645508, "blob_id": "e91dc54adfcfb0dd9800f13b0e4ad24fdbe3926a", "content_id": "0961a06c67baabfb270c72d972a1608f371f6201", "detected_licenses": [ "MIT" 
], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2790, "license_type": "permissive", "max_line_length": 58, "num_lines": 122, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2023] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2023-01-01',\n 'description': 'Deň vzniku Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-01-06',\n 'description': 'Zjavenie Pána / Traja králi',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-04-07',\n 'description': 'Veľký piatok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-04-10',\n 'description': 'Veľkonočný pondelok',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2023-05-01',\n 'description': 'Sviatok práce',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-05-08',\n 'description': 'Deň víťazstva nad fašizmom',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-07-05',\n 'description': 'Sviatok svätého Cyrila a Metoda',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-08-29',\n 'description': 'Výročie SNP',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-09-01',\n 'description': 'Deň Ústavy Slovenskej republiky',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-09-15',\n 'description': 'Sedembolestná Panna Mária',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-01',\n 'description': 'Sviatok všetkých svätých',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-11-17',\n 'description': 'Deň boja za slobodu a demokraciu',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2023-12-24',\n 'description': 'Štedrý deň',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-25',\n 'description': 'Prvý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2023-12-26',\n 'description': 'Druhý sviatok vianočný',\n 'locale': 'sk-SK',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" }, { "alpha_fraction": 0.3187675178050995, "alphanum_fraction": 0.3635854423046112, "avg_line_length": 20.780487060546875, "blob_id": "a1b33dedeb3ecf38657730491dc0f480fb11ea65", "content_id": "be47366992896225f227360fca04850cc1427df4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1785, "license_type": "permissive", "max_line_length": 51, "num_lines": 82, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_GB-2011] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2011-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-01-03',\n 'description': \"New Year's Day (observed)\",\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-04-22',\n 'description': 'Good Friday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 
'NRV'\n },\n {\n 'date': '2011-04-25',\n 'description': 'Easter Monday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2011-05-02',\n 'description': 'Early May Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-05-30',\n 'description': 'Spring Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-08-29',\n 'description': 'August Bank Holiday',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2011-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2011-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2011-12-27',\n 'description': 'Christmas Day (observed)',\n 'locale': 'en-GB',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n }\n]" }, { "alpha_fraction": 0.3079564869403839, "alphanum_fraction": 0.35374927520751953, "avg_line_length": 20.317073822021484, "blob_id": "2de8e4a807b69e7efc68775d3b42308316a4597d", "content_id": "25766fc5477a9d788f629c948240e5210a90cd08", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1747, "license_type": "permissive", "max_line_length": 50, "num_lines": 82, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[en_NZ-2018] 1.py", "repo_name": "GothenburgBitFactory/holidata", "src_encoding": "UTF-8", "text": "[\n {\n 'date': '2018-01-01',\n 'description': \"New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-01-02',\n 'description': \"Day after New Year's Day\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-02-06',\n 'description': 'Waitangi Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-03-30',\n 'description': 'Good Friday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-02',\n 'description': 'Easter Monday',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2018-04-25',\n 'description': 'ANZAC Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2018-06-04',\n 'description': \"Queen's Birthday\",\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-10-22',\n 'description': 'Labour Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NV'\n },\n {\n 'date': '2018-12-25',\n 'description': 'Christmas Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2018-12-26',\n 'description': 'Boxing Day',\n 'locale': 'en-NZ',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n }\n]" }, { "alpha_fraction": 0.31509268283843994, "alphanum_fraction": 0.3614298403263092, "avg_line_length": 20.38679313659668, "blob_id": "97e27dc7c7290e104285931de8716fc435d9928a", "content_id": "52d478d2888426a827985a0d3156332137b4d4c2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 42, "num_lines": 106, "path": "/tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[fi_FI-2013] 1.py", "repo_name": "GothenburgBitFactory/holidata", 
"src_encoding": "UTF-8", "text": "[\n {\n 'date': '2013-01-01',\n 'description': 'Uudenvuodenpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-01-06',\n 'description': 'Loppiainen',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-03-29',\n 'description': 'Pitkäperjantai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-03-31',\n 'description': 'Pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-04-01',\n 'description': '2. pääsiäispäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-01',\n 'description': 'Vappu',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-05-09',\n 'description': 'Helatorstai',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-05-19',\n 'description': 'Helluntaipäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-06-22',\n 'description': 'Juhannuspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-11-02',\n 'description': 'Pyhäinpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRV'\n },\n {\n 'date': '2013-12-06',\n 'description': 'Itsenäisyyspäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NF'\n },\n {\n 'date': '2013-12-25',\n 'description': 'Joulupäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n },\n {\n 'date': '2013-12-26',\n 'description': 'Tapaninpäivä',\n 'locale': 'fi-FI',\n 'notes': '',\n 'region': '',\n 'type': 'NRF'\n }\n]" } ]
392
rohitkuk/DC_GAN_IM_1
https://github.com/rohitkuk/DC_GAN_IM_1
cf2ee64e203dce7a862f12f60ef6f73202b0ab6c
886efb381743830fa0815aba52a563019b9d5326
40d3c20888ae8f442745a2514112e40e8c220478
refs/heads/master
2023-05-10T16:15:31.285591
2021-06-07T17:02:19
2021-06-07T17:02:19
342,787,712
3
0
null
null
null
null
null
[ { "alpha_fraction": 0.6377307176589966, "alphanum_fraction": 0.6527683138847351, "avg_line_length": 31.503704071044922, "blob_id": "f5b13e7bb341d43692e61262e9d31dba3a558658", "content_id": "b158b0913dc7ca653de28fdbe6c1c17610d35cd6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4389, "license_type": "no_license", "max_line_length": 123, "num_lines": 135, "path": "/train.py", "repo_name": "rohitkuk/DC_GAN_IM_1", "src_encoding": "UTF-8", "text": "# Imports \nimport torch\nimport torch.nn as nn\nfrom torchvision import datasets, transforms\nfrom torch.utils.data import DataLoader\nimport torch.optim as optim\nfrom model import Discrimiator, Generator, initialize_wieghts\nfrom torch.utils.tensorboard import SummaryWriter\nfrom tqdm.auto import tqdm\nfrom torchvision.utils import make_grid\nimport os\nimport shutil\nfrom IPython import get_ipython\nimport wandb\n\nwandb.init(project=\"gans\", entity=\"rohitkuk\")\n\nshutil.rmtree(\"logs\") if os.path.isdir(\"logs\") else \"\"\n\n# Hyper Paramerts\nDEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'\nNUM_EPOCHS = 5\nNOISE_DIM = 100\nIMG_DIM = 64\nlr = 2e-4\nBATCH_SIZE = 128\nMAPS_GEN = 64\nMAPS_DISC = 64\nIMG_CHANNELS = 3\nFIXED_NOISE = torch.randn(64, NOISE_DIM, 1, 1).to(DEVICE)\n\n\n# Transforms\nTrasforms = transforms.Compose([\n transforms.Resize(IMG_DIM),\n transforms.CenterCrop(IMG_DIM),\n transforms.ToTensor(),\n transforms.Normalize(\n (0.5, 0.5, 0.5),\n (0.5, 0.5, 0.5))\n ])\n\n\n# Data Loaders\ntrain_dataset = datasets.ImageFolder(root = 'img_align_celeba', transform=Trasforms)\ntrain_loader = DataLoader(train_dataset, batch_size = BATCH_SIZE, shuffle=True, drop_last=True)\n\n\n# Model Initializes\ngenerator = Generator(noise_channels=NOISE_DIM, img_channels=IMG_CHANNELS, maps=MAPS_GEN).to(DEVICE)\ndiscremenator = Discrimiator(num_channels=IMG_CHANNELS, maps=MAPS_DISC).to(DEVICE)\n\n\n# weights Initialize\ninitialize_wieghts(generator)\ninitialize_wieghts(discremenator)\n\n# discremenator.apply(initialize_wieghts)\n# generator.apply(initialize_wieghts)\n\n\n# Loss and Optimizers\ngen_optim = optim.Adam(params = generator.parameters(), lr=lr, betas=(0.5, 0.999))\ndisc_optim = optim.Adam(params = discremenator.parameters(), lr=lr, betas=(0.5, 0.999))\ncriterion = nn.BCELoss()\n\n\n# Tensorboard Implementation\nwriter_real = SummaryWriter(f\"logs/real\")\nwriter_fake = SummaryWriter(f\"logs/fake\")\n\nwandb.watch(generator)\nwandb.watch(discremenator)\n\n\n# Code for COLLAB TENSORBOARD VIEW\ntry:\n get_ipython().magic(\"%load_ext tensorboard\")\n get_ipython().magic(\"%tensorboard --logdir logs\")\nexcept:\n pass\n\n# training\ndiscremenator.train()\ngenerator.train()\nstep = 0\n\nfor epoch in range(1, NUM_EPOCHS+1):\n tqdm_iter = tqdm(enumerate(train_loader), total = len(train_loader), leave = False)\n\n for batch_idx, (data, _) in tqdm_iter:\n data = data.to(DEVICE)\n batch_size = data.shape[0]\n \n # ====================== Training the Discremnator===============\n latent_noise = torch.randn(BATCH_SIZE, NOISE_DIM, 1, 1).to(DEVICE)\n fake_img = generator(latent_noise)\n \n disc_fake = discremenator(fake_img.detach()).reshape(-1)\n disc_real = discremenator(data).reshape(-1)\n\n disc_fake_loss = criterion(disc_fake, torch.zeros_like(disc_fake))\n disc_real_loss = criterion(disc_real, torch.ones_like(disc_real))\n disc_loss = (disc_fake_loss+disc_real_loss)/2\n\n discremenator.zero_grad()\n disc_loss.backward()\n disc_optim.step()\n\n # ====================== Training the 
Generator===============\n # gen_img = generator(latent_noise)\n output = discremenator(fake_img).reshape(-1)\n gen_loss = criterion(output, torch.ones_like(output))\n generator.zero_grad()\n gen_loss.backward()\n gen_optim.step()\n \n # Logger\n tqdm_iter.set_description(f\"Epoch [{epoch}/{NUM_EPOCHS}]\")\n tqdm_iter.set_postfix(disc_loss = \"{0:.4f}\".format(disc_loss.item()), gen_loss = \"{0:.4f}\".format(gen_loss.item()))\n\n # for Tensorboard\n if batch_idx % 50 == 0:\n GAN_gen = generator(FIXED_NOISE)\n \n img_grid_real = make_grid(data[:32], normalize=True)\n img_grid_fake = make_grid(GAN_gen[:32], normalize=True)\n\n writer_real.add_image(\"Real\", img_grid_real, global_step=step)\n writer_fake.add_image(\"Fake\", img_grid_fake, global_step=step)\n wandb.log({\"Discremenator Loss\": disc_loss.item(), \"Generator Loss\": gen_loss.item()})\n wandb.log({\"img\": [wandb.Image(img_grid_fake, caption=step)]})\n step +=1 \n torch.save(generator.state_dict(), os.path.join(wandb.run.dir, 'dc_gan_model_gen.pt'))\n torch.save(discremenator.state_dict(), os.path.join(wandb.run.dir, 'dc_gan_model_disc.pt'))\n\n" } ]
1
SathyaBhat/folding-info
https://github.com/SathyaBhat/folding-info
03d45b5128ef4b84365c7be963f7c4973f11f162
8f2f6a02c390e1e5d32a8efb167eb9dc276347cd
f9852fb8df3af60a150ea292864e1e8f9fa0e483
refs/heads/master
2023-05-26T09:17:16.382392
2023-05-23T05:14:18
2023-05-23T05:14:18
259,902,507
0
0
null
2020-04-29T11:07:55
2020-07-25T23:05:31
2023-05-23T05:14:19
Python
[ { "alpha_fraction": 0.7033492922782898, "alphanum_fraction": 0.7177033424377441, "avg_line_length": 29, "blob_id": "f57689f3c7d4061f002a590c80b3b34c1535d072", "content_id": "990265d74d1ae82bf54ce5ec7f27b9a52618ded8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 209, "license_type": "no_license", "max_line_length": 57, "num_lines": 7, "path": "/lambda_function.py", "repo_name": "SathyaBhat/folding-info", "src_encoding": "UTF-8", "text": "from folding_stats import get_and_publish_stats\nVERSION=\"2.0.0\"\n\ndef lambda_handler(event, context):\n print(f\"Starting F@H stats lambda version {VERSION}\")\n get_and_publish_stats()\n print(f\"Goodbye!\")" }, { "alpha_fraction": 0.375, "alphanum_fraction": 0.6875, "avg_line_length": 15, "blob_id": "9ea656f2b65417b131f60f3acd6096ffe4224a8e", "content_id": "fb874fd720e6f10e1d49384c887b3973e7501807", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 32, "license_type": "no_license", "max_line_length": 16, "num_lines": 2, "path": "/requirements.txt", "repo_name": "SathyaBhat/folding-info", "src_encoding": "UTF-8", "text": "boto3==1.14.28\nrequests==2.31.0\n" }, { "alpha_fraction": 0.7322580814361572, "alphanum_fraction": 0.7322580814361572, "avg_line_length": 33.55555725097656, "blob_id": "570ca17adcea386ccacbf36086fbe56f38b41106", "content_id": "d851b64675d705c94ebea97501bd30edf6e1907b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 310, "license_type": "no_license", "max_line_length": 100, "num_lines": 9, "path": "/README.md", "repo_name": "SathyaBhat/folding-info", "src_encoding": "UTF-8", "text": "### Folding stats Discord Update\n\nFetches stats generated by [Bibhas'](https://github.com/iambibhas) endpoint and pushes it to discord\n\n### Updating the lambda\n\n- Install [GNU Make](https://www.gnu.org/software/make/)\n- Run `make build`\n - If you have profiles, make sure to export `export AWS_PROFILE=<name>`" }, { "alpha_fraction": 0.6197339296340942, "alphanum_fraction": 0.6419068574905396, "avg_line_length": 30.10344886779785, "blob_id": "3d45b2044e91a082a3e74479588831f79a18e058", "content_id": "2dbe7c83543d57da4d5520d0e8fbe9f38bc37b8c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1804, "license_type": "no_license", "max_line_length": 118, "num_lines": 58, "path": "/folding_stats.py", "repo_name": "SathyaBhat/folding-info", "src_encoding": "UTF-8", "text": "import requests\nfrom time import asctime\nfrom json import loads\nfrom sys import exit\nfrom os import environ\nimport boto3\n\n\nfrom difflib import unified_diff\n\ndef save_to_dynamo(current_data, table):\n table.update_item(\n Key={\n 'id': 1 \n },\n UpdateExpression='set details=:val1', \n ExpressionAttributeValues={\n ':val1':current_data\n }\n ) \n\ndef get_from_dynamo(table):\n return table.get_item(Key={'id':1}).get('Item').get('details')\n\n\ndef get_and_publish_stats():\n discord_webhook = environ.get('DISCORD_WEBHOOK')\n stats_url = environ.get('STATS_URL')\n response = requests.get(stats_url)\n if response.status_code != 200:\n print(f\"Couldn't fetch stats, status code: {response.status_code}\")\n exit(1)\n\n dynamo = boto3.resource('dynamodb', region_name='eu-west-1')\n table = dynamo.Table('folding_info')\n previous_data = get_from_dynamo(table)\n current_data = response.text\n save_to_dynamo(current_data, table)\n diff_text = 
''.join(unified_diff(previous_data.splitlines(1), current_data.splitlines(1)))\n discord_message = {\n \"content\": f\"Folding@Home stats for Hackatta as of {asctime()} UTC\",\n \"embeds\": [\n {\n \"description\": f\"```{diff_text}```\",\n \"title\": \"Team Summary\",\n \"url\": \"https://folding.extremeoverclocking.com/team_summary.php?s=&t=237378\",\n \"color\": 4289797,\n \"footer\": {\n \"icon_url\": \"https://foldingathome.org/wp-content/uploads/2016/09/cropped-folding-at-home-logo-1-64x64.png\",\n \"text\": \"Folding@Home\"\n }\n }\n ]\n }\n\n discord_response = requests.post(url=discord_webhook, json=discord_message)\n if discord_response.status_code != 204:\n print(f\"Could not post to discord, status code {discord_response.status_code}\")\n" }, { "alpha_fraction": 0.7260726094245911, "alphanum_fraction": 0.7326732873916626, "avg_line_length": 37, "blob_id": "0320326c25ea0809ab36405d7c612b10f0b160d6", "content_id": "2c5fa7269fb95116f1cab79cca67d30439498616", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 303, "license_type": "no_license", "max_line_length": 102, "num_lines": 8, "path": "/Makefile", "repo_name": "SathyaBhat/folding-info", "src_encoding": "UTF-8", "text": ".PHONY: build\n.DEFAULT_GOAL := build\n\nbuild:\n\t@pip3 install --target ./package -r requirements.txt\n\tcd package && zip -r9 ../function.zip .\n\t@zip -q -g function.zip lambda_function.py folding_stats.py\n\taws lambda update-function-code --function-name folding_at_home_stats --zip-file fileb://function.zip" } ]
5
fmelp/news_classifier
https://github.com/fmelp/news_classifier
a9aedbaa53f3bb6cc07fa9424a9e49bc903e746a
3a20f8d1d695f224cba4f9e0542ea78bc73d615d
9150635e105e804cb697227f1b73feaa0910715e
refs/heads/master
2021-01-18T14:19:18.688412
2015-02-07T14:13:33
2015-02-07T14:13:33
30,457,733
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6845733523368835, "alphanum_fraction": 0.6914666295051575, "avg_line_length": 27.228187561035156, "blob_id": "74f0726cc769f5f19c752ac6e84175b3ca969b4d", "content_id": "3fcf65b65a70a593954af2a755bda09c4113d729", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4207, "license_type": "no_license", "max_line_length": 148, "num_lines": 149, "path": "/classifier.py", "repo_name": "fmelp/news_classifier", "src_encoding": "UTF-8", "text": "#!/bin/python\n\nimport string\nimport sys\nimport random\nimport operator\nfrom sklearn.naive_bayes import MultinomialNB\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.preprocessing import LabelEncoder\nfrom sklearn.feature_extraction import DictVectorizer\nfrom sklearn.cross_validation import train_test_split\n\nkeyWords = [\"shooting\", \"firearm\", \"gun\", \"pistol\", \"bullet\", \"gunshot\"]\n\n#read in raw data from file and return a list of (label, article) tuples\ndef get_data(filename): \n\tdata = [line.strip().split('\\t') for line in open(filename).readlines()]\n\trandom.shuffle(data)\n\treturn data\n\n\ndef remove_stop_word(lol):\n\tstop_words = [line.strip() for line in open('stopwords.txt')]\n\tfor word in stop_words:\n\t\tif word in ['.', ',', '/', ';', '!', '?', '-', \"\\\\\"]:\n\t\t\tlol[0] = lol[0].replace(word, ' ')\n\t\tif len(word) <= 4:\n\t\t\tlol = [x for x in lol if x not in stop_words]\n\t\telse:\n\t\t\tlol[0] = lol[0].replace(word, '')\n\treturn lol\n\n\ndef get_features(X) :\n\tfeatures = []\n\tstop_words = [line.strip() for line in open('stopwords.txt')]\n\n\tfor x in X:\n\t\tf ={}\n\t\tarticle = x.split()\n\t\tarticle = [word for word in article if word not in stop_words]\n\t\tfor word in article:\n\t\t\tif word in f : f[word] += 1\n\t\t\telse : f[word] = 1\n\t\tfeatures.append(f)\n\treturn features\n\n\n\n#vectorize feature dictionaries and return feature and label matricies\ndef get_matricies(data) : \n\tdv = DictVectorizer(sparse=True) \n\tle = LabelEncoder()\n\ty = [d[0] for d in data]\n\ttexts = [d[1] for d in data]\n\tX = get_features(texts)\n\t#Here we are returning 5 things, the label vector y and feature matrix X, but also the texts from which the features were extracted and the \n\t#objects that were used to encode them. 
These will come in handy for your analysis, but you can ignore them for the initial parts of the assignment\n\treturn le.fit_transform(y), dv.fit_transform(X), texts, dv, le\n\n\n#train and multinomial naive bayes classifier\ndef train_classifier(X, y):\n\tclf = LogisticRegression()\n\tclf.fit(X,y)\n\treturn clf\n\n\n#test the classifier\ndef test_classifier(clf, X, y):\n\treturn clf.score(X,y)\n\n\n#cross validation\t\ndef cross_validate(X, y, numfolds=5):\n\ttest_accs = []\n\tsplit = 1.0 / numfolds\n\tfor i in range(numfolds):\n\t\tx_train, x_test, y_train, y_test = train_test_split(X, y, test_size=split, random_state=i)\n\t\tclf = train_classifier(x_train, y_train)\n\t\ttest_acc = test_classifier(clf, x_test, y_test)\n\t\ttest_accs.append(test_acc)\n\t\tprint 'Fold %d : %.05f'%(i,test_acc)\n\ttest_average = float(sum(test_accs))/ numfolds\n\tprint 'Test Average : %.05f'%(test_average)\n\tprint\n\treturn test_average\n\n\n#run a rule based classifier and calculate the accuracy\ndef rule_based_classifier(data):\n\tcorrect = 0.0; total = 0.0\n\tfor label, text in data : \n\t\tprediction = '0'\n\t\tfor keyWord in keyWords:\n\t\t\tif keyWord in text:\n\t\t\t\tprediction = '1'\n\t\tif prediction == label:\n\t\t\tcorrect += 1\n\t\ttotal += 1\n\tprint 'Rule-based classifier accuracy: %.05f'%(correct / total)\n\n\n#train and multinomial naive bayes classifier\ndef get_top_features(X, y, dv):\n\tclf = train_classifier(X, y)\n\t#the DictVectorizer object remembers which column number corresponds to which feature, and return the feature names in the correct order\n\tfeature_names = dv.get_feature_names()\n\tweights_ls = clf.coef_\n\td = {}\n\tfor keyWord in keyWords:\n\t\td[keyWord] = weights_ls[0][feature_names.index(keyWord)]\n\t\n\treturn d\n\t\n\ndef get_misclassified_examples(y, X, texts) :\n\tx_train, x_test, y_train, y_test, train_texts, test_texts = train_test_split(X, y, texts)\n\tclf = train_classifier(x_train, y_train)\n\tpred = clf.predict(x_test)\n\tpreds_bool = []\t\n\tfalse_pos = ''\n\tfor i in range(len(y_test)):\n\t\tif pred[i] == y_test[i]:\n\t\t\tpreds_bool.append(True)\n\t\telse:\n\t\t\tpreds_bool.append(False)\n\t\t\tif pred[i] == 1:\n\t\t\t\tfalse_pos = test_texts[i]\n\t\t\t\n\tprint \"\\nFALSE POSITIVE EXAMPLES: \" + false_pos\n\treturn preds_bool\n\n\nif __name__ == '__main__' : \n\n\traw_data = get_data('articles')\n\t\n\tprint '\\nRule-based classification'\n\trule_based_classifier(raw_data)\n\n\tprint '\\nStatistical classification'\n\ty, X, texts, dv, le = get_matricies(raw_data)\n\tcross_validate(X,y)\t\n\n\ttop_features = get_top_features(X, y, dv)\n\tprint '\\nTop Features: '\n\tprint top_features\n\tget_misclassified_examples(y, X, texts)\n\n" }, { "alpha_fraction": 0.6538090705871582, "alphanum_fraction": 0.6933462023735046, "avg_line_length": 39.6274528503418, "blob_id": "790f8063482eb9a1cf7c9b7d64cc5152a4e8dab6", "content_id": "240d4a570422b41eca2de785a84cbce94d9fc751", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2074, "license_type": "no_license", "max_line_length": 145, "num_lines": 51, "path": "/clean_and_process_data.py", "repo_name": "fmelp/news_classifier", "src_encoding": "UTF-8", "text": "import sys\nimport urllib2\nimport json \nimport csv\n\n#api_key = 'd18dab77eed984e5982cc2973fedd9a02c342411'\n#api_key = '87feec954e195b43fa85db60bfa2ad51a899e479'\napi_key = '9814112249d7c0d6f22b778837e86f6789f2ff73'\nrequest = 
'http://access.alchemyapi.com/calls/url/URLGetCombinedData?apikey=%s&url=%s&extract=title,pub-date,entity,keyword,text&outputMode=json'\ntextRequest = 'http://access.alchemyapi.com/calls/url/URLGetText?apikey=%s&url=%s&outputMode=json'\n\ndef get_text(url) : \n\ttry : entities = urllib2.urlopen(textRequest%(api_key, url))\n\texcept urllib2.HTTPError : sys.stderr.write('BAD REQUEST\\n'); return None\n\ttry : response = json.loads(entities.read())\n\texcept ValueError : sys.stderr.write('JSON ERROR\\n'); return None\n\tprint url, response['status']\n\tif response['status'] == 'OK' : \n\t\ttxt = response['text'].encode('ascii', 'ignore').split('\\n')\n\t\treturn ' '.join(['<p>%s</p>'%l.strip() for l in txt if not(l.strip() == '')])\n\treturn None\n\ndef get_fields(url) : \n\ttry : data = urllib2.urlopen(request%(api_key, url))\n\texcept urllib2.HTTPError : sys.stderr.write('BAD REQUEST\\n'); return None\n\t#print data.read()\n\ttry : response = json.loads(data.read())\n\texcept ValueError : sys.stderr.write('JSON ERROR\\n'); return None\n\tif response['status'] == 'OK' : \n\t\ttitle = response['title'].encode('ascii', 'ignore')\n\t\tdate = response['publicationDate']['date'].encode('ascii', 'ignore')\n\t\tentities = json.dumps(response['entities'])\n\t\tkeywords = json.dumps(response['keywords'])\n\t\treturn title, date, entities, keywords\n\treturn None\n\noutput = csv.writer(open('gun-violence-urls-and-entitites.csv', 'w'))\noutput.writerow(['url', 'city', 'state', 'title', 'date', 'text', 'entities', 'keywords'])\n\nfor url in sys.stdin : \n\turl, state, city = url.strip().split('\\t')\n\t#sys.stderr.write(url+'\\n')\n\ttxt = get_text(url)\n\t#print txt\n\tif txt : \n\t\tfields = get_fields(url)\n\t\tif fields : \n\t\t\ttitle, date, entities, kws = fields\n\t\t\toutput.writerow([url, city, state, title, date, txt, entities, kws])\n\t\telse : \n\t\t\toutput.writerow([url, city, state, \"NA\", \"NA\", txt, \"NA\", \"NA\"])\n\n\n" }, { "alpha_fraction": 0.6235954761505127, "alphanum_fraction": 0.6273408532142639, "avg_line_length": 23.31818199157715, "blob_id": "b5ad29567b4db4ff00f13d2c5724e16a43b5f525", "content_id": "899124c12e4e74324990b882ccc17d27492e9652", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 534, "license_type": "no_license", "max_line_length": 68, "num_lines": 22, "path": "/custom3.py", "repo_name": "fmelp/news_classifier", "src_encoding": "UTF-8", "text": "import json \n\nrecords = json.load(open('deduped-data.json')) \n\ncityToOcc = {} \n\nfor record in records:\n\ttry:\n\t\tcity = str(record['shooting-details']['location']['city']).lower()\n\t\tdetail = record['shooting-details']['details']\n\t\tdetail = [str(x) for x in detail]\n\texcept:\n\t\tcontinue\n\telse: \n\t\tif ('The shooting was by a police officer.' 
in detail): \n\t\t\tif (city in cityToOcc):\n\t\t\t\tcityToOcc[city] = cityToOcc[city] + 1 \n\t\t\telse:\n\t\t\t\tcityToOcc[city] = 1\n\nfor key in cityToOcc: \n\tprint (\"[ '\" + key + \"', \" + str(cityToOcc[key]) + \"],\")" }, { "alpha_fraction": 0.5901374220848083, "alphanum_fraction": 0.5941794514656067, "avg_line_length": 40.20000076293945, "blob_id": "36363def9afd28a950a7a4ff95e9e82ab4952042", "content_id": "dd5bf02bf97f590c088594b39ebe3eb7801d4a37", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1237, "license_type": "no_license", "max_line_length": 103, "num_lines": 30, "path": "/convert_to_csv.py", "repo_name": "fmelp/news_classifier", "src_encoding": "UTF-8", "text": "import sys\nimport csv\nimport json\nimport pickle\nfrom datetime import datetime\n\noutput = csv.writer(open('gun-article-info.csv', 'w'))\n\nheaders = ['url', 'city', 'state', 'text', 'title', 'date', 'people', 'cities', 'states']\n\noutput.writerow(headers)\n\ncolors = {'Person' : '#6495ED'}\n\nfor row in csv.DictReader(open(sys.argv[1])) : \n\ttext = row['text']\n\tif not(row['entities'] == 'NA' or row['keywords'] == 'NA'):\n\t\tentities = json.loads(row['entities'])\n\t\tdatestr = ''\n\t\tif not(row['date'] == '') : \n\t\t\tdate = datetime.strptime(row['date'], '%Y%m%dT%H%M%S')\n\t\t\tdatestr = '%s/%s/%s'%(date.month, date.day, date.year)\n\t\tfor e in entities:\n\t\t\ttext = text.replace('%s'%e['text'], '<span class=%s>%s</span>'%(e['type'],e['text']))\n\t\tfor e in json.loads(row['keywords']):\n\t\t\ttext = text.replace(' %s '%e['text'], ' <span class=Keyword>%s</span> '%(e['text']))\n\t\toutput.writerow([row['url'], row['city'], row['state'], text, row['title'], datestr,\n\t\t\t', '.join([e['text'].encode('ascii', 'ignore') for e in entities if e['type'] == 'Person']),\n\t\t\t', '.join([e['text'].encode('ascii', 'ignore') for e in entities if e['type'] == 'City']),\n\t\t\t', '.join([e['text'].encode('ascii', 'ignore') for e in entities if e['type'] == 'StateOrCounty'])])\n\n" }, { "alpha_fraction": 0.4375757575035095, "alphanum_fraction": 0.5551515221595764, "avg_line_length": 27.34482765197754, "blob_id": "10f505978c84b849f4512dd897627cfd1107ee39", "content_id": "fa8cf879510444bc30acf30a9b2f1ecf4124a5ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 825, "license_type": "no_license", "max_line_length": 82, "num_lines": 29, "path": "/who.py", "repo_name": "fmelp/news_classifier", "src_encoding": "UTF-8", "text": "import json \n\nrecords = json.load(open('deduped-data.json')) \n\nageToOcc = {\"1-10\": 0, \"10-20\": 0, \"20-30\": 0, \"30-40\": 0, \"40-50\": 0, \"50+\": 0} \n\nfor record in records: \n\ttry: \n\t\tage = int(record['shooter-details'][0]['age']) \n\texcept: \n\t\tcontinue \n\telse: \n\t\tif 0 < age and age < 10:\n\t\t\tageToOcc[\"1-10\"] = ageToOcc[\"1-10\"] + 1\n\t\telif 10 <= age and age < 20:\n\t\t\tageToOcc[\"10-20\"] = ageToOcc[\"10-20\"] + 1\n\t\telif 20 <= age and age < 30:\n\t\t\tageToOcc[\"20-30\"] = ageToOcc[\"20-30\"] + 1\n\t\telif 30 <= age and age < 40:\n\t\t\tageToOcc[\"30-40\"] = ageToOcc[\"30-40\"] + 1\n\t\telif 40 <= age and age < 50:\n\t\t\tageToOcc[\"40-50\"] = ageToOcc[\"40-50\"] + 1\n\t\telif age >= 50:\n\t\t\tageToOcc[\"50+\"] = ageToOcc[\"50+\"] + 1\n\n\nfor key in ageToOcc: \n\t#gunToShots[key] = gunToShots[gun] / gunToOcc[gun] \n\tprint \"['\" + key + \"', \" + str(ageToOcc[key]) + \"],\" \n\n" }, { "alpha_fraction": 0.5880356431007385, 
"alphanum_fraction": 0.6045820713043213, "avg_line_length": 25.784090042114258, "blob_id": "4820c61eea4a0a9844722f6d6f81e01573767cad", "content_id": "8d7ca5de2752771d75bf1cd094bd4da383037b2d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2357, "license_type": "no_license", "max_line_length": 104, "num_lines": 88, "path": "/dedup.py", "repo_name": "fmelp/news_classifier", "src_encoding": "UTF-8", "text": "import json\nimport datetime\n\nrecords = json.load(open('aggregated-data.json')) \n\ndeduped = []\n\ndef can_merge(this, that): \n\tcount = 0 \n\t#shooter\n\tthisSD = this['shooter-details'] \n\tif (len(thisSD) > 0):\n\t\tthisSN1 = thisSD[0] \n\t\tthisSN2 = thisSN1['name']\n\t\tthatSD = that['shooter-details'] \n\t\tif (len(thatSD) > 0):\n\t\t\tthatSN1 = thatSD[0] \n\t\t\tthatSN2 = thatSN1['name']\n\t\t\tif (thisSN2 == \"unknown\" or thatSN2 == \"unknown\"): \n\t\t\t\tcount = count + 1 \n\t\t\telif (thisSN2 == thatSN2):\n\t\t\t\tcount = count + 1 \n\t\n\t#victim\n\tthisVD = this['victim-details']\n\tif (len(thisVD) > 0):\n\t\tthisVN1 = thisVD[0] \n\t\tthisVN2 = thisVN1['name'] \n\t\tthatVD = that['victim-details'] \n\t\tif (len(thatVD) > 0):\n\t\t\tthatVN1 = thatVD[0] \n\t\t\tthatVN2 = thatVN1['name'] \n\n\t\t\tif (thisVN2 == thatVN2): \n\t\t\t\tcount = count + 1 \n\n\t#date \n\tthisD = this['shooting-details'] \n\tthisT = thisD['time'] \n\tthisDate = thisT['date'] \n\tthatD = that['shooting-details'] \n\tthatT = thatD['time'] \n\tthatDate = thatT['date'] \n\tif (thisDate == thatDate): \n\t\tcount = count + 1 \n\tif (count >= 2): \n\t\treturn True \n\telse: \n\t\treturn False \n\ndeduped.append(records[0]) \n\nfor this_record in records: \n\tmerged = False \n\tfor that_record in deduped: \n #update fields in deduped with new information added by this_record\n \t\tif can_merge(this_record, that_record): \n \t\t\tmerged = True\n \t\tthisD = this_record['shooting-details'] \n \t\tthisT = thisD['time'] \n \t\tthisDate = thisT['date'] \n \t\tthatD = that_record['shooting-details'] \n \t\tthatT = thatD['time'] \n \t\tthatDate = thatT['date'] \n \t\tthisDateSplit = thisDate.split('/') \n \t\tthatDateSplit = thatDate.split('/')\n \t\t\n \t\tdateThis = None\n \t\tdateThat = None\n\n \t\ttry: \n \t\t\tdateThis = datetime.date(int(thisDateSplit[2]), int(thisDateSplit[0]), int(thisDateSplit[1])) \n \t\t\tdateThat = datetime.date(int(thatDateSplit[2]), int(thatDateSplit[0]), int(thatDateSplit[1]))\n \t\texcept Exception: \n \t\t\tcontinue \n\n \t\tif (dateThis > dateThat): \n \t\t\tprint \"herehreredws\"\n \t\t\tthat_record['article'] = this_record['article'] \n \t\t\tthat_record['shooter-details'] = this_record['shooter-details']\n \t\t\tthat_record['shooting-details'] = this_record['shooting-details']\n\n\n\tif (not merged) : \n\t\tdeduped.append(this_record)\n\n\njson.dump(deduped, open('deduped-data.json', 'w'))\n" } ]
6
jefimenko/restuarants
https://github.com/jefimenko/restuarants
fc230075c5824650cca3f2fea89135ab21b960d2
2230e8c3d966fe7654d1f007ccde8d228b2fb89b
aa13652939261aa06e3f749f4407cf82a17cc901
refs/heads/master
2020-05-30T01:37:41.067365
2015-02-23T06:14:31
2015-02-23T06:14:31
30,920,650
0
0
null
2015-02-17T14:33:41
2015-02-23T06:14:41
2015-02-23T08:34:47
Python
[ { "alpha_fraction": 0.5969309210777283, "alphanum_fraction": 0.6051151156425476, "avg_line_length": 26.3426570892334, "blob_id": "8863a99ec78be3fc2f38a26c564be77066db56cd", "content_id": "414ed6b314bd7f78659a4cfb9810bc33d0c06caf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3910, "license_type": "permissive", "max_line_length": 77, "num_lines": 143, "path": "/scraper.py", "repo_name": "jefimenko/restuarants", "src_encoding": "UTF-8", "text": "from bs4 import BeautifulSoup\nfrom copy import copy\nimport requests\nimport sys\nimport re\n\nTARGET_DOMAIN = 'http://info.kingcounty.gov'\nTARGET_PATH = '/health/ehs/foodsafety/inspections/Results.aspx'\nTARGET_QUERY = {\n 'Output': 'W',\n 'Business_Name': '',\n 'Business_Address': '',\n 'Longitude': '',\n 'Latitude': '',\n 'City': '',\n 'Zip_Code': '',\n 'Inspection_Type': 'All',\n 'Inspection_Start': '',\n 'Inspection_End': '',\n 'Inspection_Closed_Business': 'A',\n 'Violation_Points': '',\n 'Violation_Red_Points': '',\n 'Violation_Descr': '',\n 'Fuzzy_Search': 'N',\n 'Sort': 'H',\n}\n\n\ndef get_inspection_page(**kwargs):\n url = TARGET_DOMAIN + TARGET_PATH\n params = copy(TARGET_QUERY)\n\n for key, val in kwargs.items():\n if key in TARGET_QUERY:\n params[key] = val\n response = requests.get(url, params=params)\n response.raise_for_status\n return response.content, response.encoding\n\n\ndef load_inspection_page():\n with open('inspection_page.html') as page:\n data = page.readlines()\n doc = ''\n enc = data[-1]\n for line in data[:-1]:\n doc = '{}{}'.format(doc, line)\n return doc, enc\n\n\ndef save_inspection_page(content, encoding):\n with open('inspection_page.html', 'w') as page:\n page.write(content)\n page.write('\\r\\n' + encoding)\n\n\ndef parse_source(content, encoding='utf-8'):\n parsed = BeautifulSoup(content, from_encoding=encoding)\n return parsed\n\n\ndef extract_data_listings(soup):\n # Select a list of all html tags with class pattern PR[]~\n return soup.find_all(id=re.compile(r'PR\\d+~'))\n\n\ndef extract_restuarant_metadata(listing):\n metadata = {}\n metadata_rows = listing.find('tbody').find_all(\n has_two_tds, recursive=False)\n for row in metadata_rows:\n key, value = row.find_all('td', recursive=False)\n key, value = clean_data(key), clean_data(value)\n if key:\n metadata[key] = value\n else:\n metadata['Address'] = '{}, {}'.format(metadata['Address'], value)\n\n return metadata\n\n\ndef extract_score_data(listing):\n inspection_rows = listing.find_all(is_inspection_row)\n total, high_score, inspections, average = 0, 0, 0, 0\n for row in inspection_rows:\n score = int(row.find_all('td', recursive=False)[2].text)\n if score > high_score:\n high_score = score\n total += score\n inspections += 1\n\n if inspections:\n average = float(total)/inspections\n return {\n 'Average Score': average,\n 'High Score': high_score,\n 'Total Inspections': inspections}\n\n\ndef has_two_tds(tag):\n return tag.name == 'tr' and len(tag.find_all('td', recursive=False)) == 2\n\n\ndef is_inspection_row(tag):\n if tag.name == 'tr':\n row_cells = tag.find_all('td', recursive=False)\n tag_text = clean_data(row_cells[0]).lower()\n return len(row_cells) == 4 \\\n and not tag_text.startswith('inspection') \\\n and 'inspection' in tag_text\n else:\n return False\n\n\ndef clean_data(cell):\n try:\n return cell.string.strip('- :\\n')\n except AttributeError:\n return ''\n\nif __name__ == '__main__':\n\n if len(sys.argv) == 1:\n params = {}\n params['Inspection_Start'] = '2/1/2014'\n 
params['Inspection_End'] = '2/1/2015'\n params['Zip_Code'] = '98006'\n content, encoding = get_inspection_page(**params)\n save_inspection_page(content, encoding)\n else:\n content, encoding = load_inspection_page()\n\n doc = parse_source(content, encoding)\n listings = extract_data_listings(doc)\n for listing in listings:\n metadata = extract_restuarant_metadata(listing)\n restuarant_data = extract_score_data(listing)\n restuarant_data.update(metadata)\n print restuarant_data\n print\n\n print len(listings)\n # print listings[0].prettify()\n" }, { "alpha_fraction": 0.8255813717842102, "alphanum_fraction": 0.8275193572044373, "avg_line_length": 50.599998474121094, "blob_id": "889c828871ef38672c9f97c8779a5cc76524dd48", "content_id": "97588e7c37dcac3722a756bfe902daa72857a0dd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 516, "license_type": "permissive", "max_line_length": 101, "num_lines": 10, "path": "/README.md", "repo_name": "jefimenko/restuarants", "src_encoding": "UTF-8", "text": "# restuarants\n\nA scraper for King County's Public Health website that forms a query using python's requests module.\nThe response is parsed using BeautifulSoup, and currently, relevant information about each business\nwithin a range of dates is extracted and stored in a dictionary, including information about\ninspection scores.\n\nReferenced BeautifulSoup documentation extensively for the use of find_all() and attributes available\nfor BeautifulSoup tag objects.\nhttp://www.crummy.com/software/BeautifulSoup/bs4/doc/\n" } ]
2
trove/trove-superalbums
https://github.com/trove/trove-superalbums
89182aceb34a529d592cd2229a300caed4d5962f
3e325dc8c37f4cca9e1318dbc1cee5b348024414
98002b104211ac7feca72822da9ce738408bc7ab
refs/heads/master
2020-01-22T05:04:19.362556
2011-08-15T23:22:04
2011-08-15T23:22:04
1,060,868
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.516339898109436, "alphanum_fraction": 0.6928104758262634, "avg_line_length": 16, "blob_id": "3065e2d9e40fb3cc0910aee4acb8e0f7183a52f5", "content_id": "048c4e32e30d0dee42efcd0642aac172ecf79ca9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 153, "license_type": "permissive", "max_line_length": 21, "num_lines": 9, "path": "/requirements.txt", "repo_name": "trove/trove-superalbums", "src_encoding": "UTF-8", "text": "Django==1.1.2\nMySQL-python==1.2.3\nPIL==1.1.7\ndistribute==0.6.14\ndjango-tagging==0.3.1\noauth==1.0.1\npython-dateutil==1.5\nsimplejson==2.1.2\nwsgiref==0.1.2\n" }, { "alpha_fraction": 0.603519082069397, "alphanum_fraction": 0.603519082069397, "avg_line_length": 35.29787063598633, "blob_id": "c5f2ff0ca14f419ba2e8a9dc252ebfb12a835f1e", "content_id": "8401fa1af349e9a6eaa575a55950713d8e24ea72", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1705, "license_type": "permissive", "max_line_length": 119, "num_lines": 47, "path": "/core/auth_backends.py", "repo_name": "trove/trove-superalbums", "src_encoding": "UTF-8", "text": "from django.conf import settings\nfrom django.contrib.auth.backends import ModelBackend\nfrom django.contrib.auth.models import User, UserManager\nfrom core.models import UserProfile\nfrom troveclient import TroveAPI\n\n\nclass TroveAuthenticationToken:\n pass\n\nclass TroveBackend:\n \n def authenticate(self, oauth_token=None):\n if oauth_token is None:\n return None\n user_info = None\n try:\n api = TroveAPI(settings.TROVE_APP_KEY, settings.TROVE_APP_SECRET, ['photos'],oauth_token)\n user_info = api.get_user_info()\n except:\n return None\n \n try:\n trove_profile = UserProfile.objects.get(trove_user_id=user_info['trove_internal_id'])\n return trove_profile.user\n except UserProfile.DoesNotExist:\n user = User()\n if user_info.has_key('trove_username'):\n user.username = user_info['trove_username']\n else:\n user.username = user_info['trove_internal_id'] # We use the trove internal id if there is no user name\n user.first_name = user_info['first_name']\n user.last_name = user_info['last_name']\n user.save()\n trove_profile = UserProfile()\n trove_profile.trove_user_id = user_info['trove_internal_id']\n trove_profile.trove_access_token_key = oauth_token.key\n trove_profile.trove_access_token_secret = oauth_token.secret\n trove_profile.user = user\n trove_profile.save()\n return user\n \n def get_user(self, user_id):\n try:\n return User.objects.get(id=user_id)\n except:\n return None" }, { "alpha_fraction": 0.5519999861717224, "alphanum_fraction": 0.5519999861717224, "avg_line_length": 40.83333206176758, "blob_id": "83610612ed703ca06acb1d634dc6b063f922ef93", "content_id": "c1b0bc0546b2aa90b5debd84acc7da0a8866af7d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 250, "license_type": "permissive", "max_line_length": 78, "num_lines": 6, "path": "/superalbums/urls.py", "repo_name": "trove/trove-superalbums", "src_encoding": "UTF-8", "text": "from django.conf.urls.defaults import patterns, url\n\nurlpatterns = patterns('superalbums.views',\n url(r'^albums/(?P<album>.*)/$', 'album', name='album'),\n url(r'^albums/$', 'my_albums', name='my_albums'),\n)" }, { "alpha_fraction": 0.6064446568489075, "alphanum_fraction": 0.6091411709785461, "avg_line_length": 35.90049743652344, "blob_id": "e32afeec3645c40fdb049a199bdd5c0526f28755", "content_id": 
"e77b7d1ab394278df630977df51f37ba9494bbc1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7417, "license_type": "permissive", "max_line_length": 106, "num_lines": 201, "path": "/superalbums/views.py", "repo_name": "trove/trove-superalbums", "src_encoding": "UTF-8", "text": "from django.contrib.auth.decorators import login_required\nfrom django.shortcuts import render_to_response, get_object_or_404\nfrom django.template.context import RequestContext\nfrom superalbums.models import *\nfrom django.http import HttpResponseRedirect,HttpResponseForbidden\nfrom troveclient import TroveAPI\nfrom core.models import UserProfile\nfrom django.conf import settings\nfrom oauth.oauth import OAuthToken\nfrom troveclient.Objects import Query\nimport uuid,os\n\ndef get_trove_api_for_user(user):\n trove_profile = UserProfile.objects.get(user=user)\n oauth_token = OAuthToken(trove_profile.trove_access_token_key,trove_profile.trove_access_token_secret)\n api = TroveAPI(settings.TROVE_APP_KEY, settings.TROVE_APP_SECRET, ['photos'],oauth_token)\n return api \n\ndef get_full_file_path(instance, filename):\n ext = filename.split('.')[-1]\n filename = \"%s.%s\" % (instance.uuid, ext)\n return os.path.join(settings.LOCAL_PHOTO_STORAGE_FULL_PATH, filename)\n\ndef get_file_path(instance, filename):\n ext = filename.split('.')[-1]\n filename = \"%s.%s\" % (instance.uuid, ext)\n return os.path.join(settings.LOCAL_PHOTO_STORAGE_FOLDER, filename)\n\ndef handle_uploaded_file(f,new_file_name):\n destination = open(new_file_name, 'wb+')\n for chunk in f.chunks():\n destination.write(chunk)\n destination.close()\n\n@login_required\ndef photo(request):\n template = 'view-photo.html'\n context_vars = {}\n \n if request.POST:\n photo = LocalPhoto()\n try:\n photo.tags = tags\n except:\n photo.tags = ''\n try:\n file = request.FILES['photo_file']\n title = request.POST['title']\n description = request.POST['description']\n tags = request.POST['tags']\n photo.uuid = uuid.uuid4()\n photo_file = get_full_file_path(photo,file.name)\n handle_uploaded_file(file,photo_file)\n photo.photo = get_file_path(photo,file.name)\n photo.title = title\n photo.description = description\n photo.user = request.user\n tags_list = tags.split(',')\n photo.tags = tags_list\n \n photo.save()\n api = get_trove_api_for_user(request.user)\n profile = UserProfile.objects.get(user=request.user)\n from troveclient.Objects import Photo\n trove_client_photo = Photo()\n trove_client_photo.height = photo.photo.height\n trove_client_photo.width = photo.photo.width\n trove_client_photo.owner = profile.trove_user_id\n trove_client_photo.date = photo.date_uploaded\n trove_client_photo.description = photo.description\n trove_client_photo.id = str(photo.uuid)\n trove_client_photo.tags = photo.tags\n trove_client_photo.title = photo.title\n urls={'original':photo.photo.url,'thumbnail':photo.thumbnail.url}\n trove_client_photo.urls = urls\n trove_photo_list = [trove_client_photo]\n \n trove_ids = api.push_photos(profile.trove_user_id, trove_photo_list)\n print trove_ids\n return HttpResponseRedirect('/photos/' + str(photo.uuid))\n except KeyError, e:\n print 'doh!'\n print e\n pass\n else:\n template = 'new-edit-photo.html'\n \n context = RequestContext(request,context_vars)\n\n return render_to_response(template,context,mimetype=\"text/html\")\n\ndef view_photo(request,photo):\n template = 'view-photo.html'\n context_vars = {}\n photo = get_object_or_404(LocalPhoto, uuid__exact=photo)\n 
context_vars['photo'] = photo\n context = RequestContext(request,context_vars)\n\n return render_to_response(template,context,mimetype=\"text/html\")\n\n@login_required\ndef album(request):\n template = 'view-album.html'\n context_vars = {}\n if request.POST:\n try:\n album_name = request.POST['album_name']\n # whee a new album\n new_album = SuperAlbum()\n new_album.album_name = album_name\n new_album.user = request.user\n new_album.save()\n edit_url = '%s%s%s' % ('/albums/',new_album.slug,'/edit')\n return HttpResponseRedirect(edit_url)\n except KeyError:\n pass\n else:\n template = 'new-album-name.html'\n \n context = RequestContext(request,context_vars)\n\n return render_to_response(template,context,mimetype=\"text/html\")\n\n@login_required\ndef edit_album(request,album):\n template = 'edit-album.html'\n context_vars = {}\n album = get_object_or_404(SuperAlbum, slug__exact=album)\n message=False\n api = get_trove_api_for_user(request.user)\n if album.user.id != request.user.id:\n return HttpResponseForbidden()\n if request.POST:\n SuperAlbumPhoto.objects.filter(album=album).delete()\n photo_ids = request.POST['photo_ids']\n ids = photo_ids.split('|')\n orderer=0\n \n users_photos = api.get_photos()\n \n for id in ids:\n ## make sure we have a TrovePhoto\n try:\n trove_photo = TrovePhoto.objects.get(trove_id=id)\n except TrovePhoto.DoesNotExist:\n trove_photo = TrovePhoto()\n trove_photo.user=request.user\n trove_photo.trove_id = id\n \n ## OH LOOK, STUPIDER\n for photo in users_photos.objects:\n print photo.urls\n print photo.trove_id\n if photo.trove_id == id:\n try:\n trove_photo.full_size = photo.urls['original']\n except KeyError:\n trove_photo.full_size = photo.urls['large']\n try:\n trove_photo.thumbnail = photo.urls['thumbnail']\n except KeyError:\n trove_photo.thumbnail = photo.urls['small']\n trove_photo.save()\n super_album_photo = SuperAlbumPhoto()\n super_album_photo.user = request.user\n super_album_photo.album=album\n super_album_photo.trove_photo=trove_photo\n super_album_photo.order=orderer\n super_album_photo.save()\n orderer = orderer+1\n message = \"Your changes have been saved\"\n\n photos = api.get_photos()\n context_vars['message'] = message\n context_vars['photos'] = photos\n context_vars['album'] = album\n context = RequestContext(request,context_vars)\n\n return render_to_response(template,context,mimetype=\"text/html\")\n\n\ndef view_album(request,album):\n template = 'view-album.html'\n context_vars = {}\n album = get_object_or_404(SuperAlbum, slug__exact=album)\n context_vars['album'] = album\n context_vars['url'] = request.build_absolute_uri()\n context = RequestContext(request,context_vars)\n\n return render_to_response(template,context,mimetype=\"text/html\")\n\n@login_required\ndef delete_album(request,album):\n template = 'view-album.html'\n context_vars = {}\n album = get_object_or_404(SuperAlbum, slug__exact=album)\n album.delete()\n context_vars['deleted_album'] = album.album_name\n context = RequestContext(request,context_vars)\n\n return render_to_response(template,context,mimetype=\"text/html\")\n" }, { "alpha_fraction": 0.6381514072418213, "alphanum_fraction": 0.6381514072418213, "avg_line_length": 35.32143020629883, "blob_id": "4582ba34583e1ed9609d8152ea3937db5584331d", "content_id": "d372ba7f6acb60376024b46158905431d4f114d2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1017, "license_type": "permissive", "max_line_length": 76, "num_lines": 28, "path": "/urls.py", "repo_name": 
"trove/trove-superalbums", "src_encoding": "UTF-8", "text": "from django.conf.urls.defaults import *\n\nfrom django.contrib import admin\n\nfrom core import urls as core_urls\n\nadmin.autodiscover()\n\nurlpatterns = patterns('',\n # Example:\n # (r'^trove_demo/', include('trove_demo.foo.urls')),\n\n # Uncomment the admin/doc line below and add 'django.contrib.admindocs' \n # to INSTALLED_APPS to enable admin documentation:\n # (r'^admin/doc/', include('django.contrib.admindocs.urls')),\n\n (r'^admin/', include(admin.site.urls)),\n (r'^photo/', 'superalbums.views.photo'),\n (r'^photos/(?P<photo>.*)/$', 'superalbums.views.view_photo'),\n (r'^album/', 'superalbums.views.album'),\n (r'^albums/(?P<album>.*)/edit$', 'superalbums.views.edit_album'),\n (r'^albums/(?P<album>.*)/delete', 'superalbums.views.delete_album'),\n (r'^albums/(?P<album>.*)/$', 'superalbums.views.view_album'),\n #(r'^albums/', 'superalbums.views.albums'),\n (r'^logout/','core.views.logout_view'),\n (r'^css/main.css','core.views.get_main_css'),\n (r'^$',include(core_urls)),\n)\n" }, { "alpha_fraction": 0.6206286549568176, "alphanum_fraction": 0.6290766000747681, "avg_line_length": 39.38888931274414, "blob_id": "3ad17c356f3cfca9d055d13d952af2e518fac18e", "content_id": "1ea118979a90c9411108cfac8e0efa72fbb712ee", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5090, "license_type": "permissive", "max_line_length": 85, "num_lines": 126, "path": "/superalbums/models.py", "repo_name": "trove/trove-superalbums", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.template.defaultfilters import slugify\nfrom tagging.fields import TagField\nfrom tagging.utils import parse_tag_input\nfrom django.contrib.auth.models import User\nfrom django.conf import settings\nimport uuid,os\nimport datetime\n# Create your models here.\n\ndef slugify_uniquely(value, model, slugfield=\"slug\"):\n \"\"\"Returns a slug on a name which is unique within a model's table\n\n This code suffers a race condition between when a unique\n slug is determined and when the object with that slug is saved.\n It's also not exactly database friendly if there is a high\n likelyhood of common slugs being attempted.\n\n A good usage pattern for this code would be to add a custom save()\n method to a model with a slug field along the lines of:\n\n from django.template.defaultfilters import slugify\n\n def save(self):\n if not self.id:\n # replace self.name with your prepopulate_from field\n self.slug = SlugifyUniquely(self.name, self.__class__)\n super(self.__class__, self).save()\n\n Original pattern discussed at\n http://www.b-list.org/weblog/2006/11/02/django-tips-auto-populated-fields\n \"\"\"\n suffix = 0\n potential = base = slugify(value)\n while True:\n if suffix:\n potential = \"-\".join([base, str(suffix)])\n if not model.objects.filter(**{slugfield: potential}).count():\n return potential\n # we hit a conflicting slug, so bump the suffix & try again\n suffix += 1\n\ndef get_file_path(instance, filename):\n ext = filename.split('.')[-1]\n filename = \"%s.%s\" % (instance.uuid, ext)\n return os.path.join(settings.LOCAL_PHOTO_STORAGE_FOLDER, filename)\n\nclass TrovePhoto(models.Model):\n user = models.ForeignKey(User)\n full_size = models.URLField(max_length=255)\n thumbnail = models.URLField(max_length=255)\n trove_id = models.CharField(max_length=200)\n \n def get_absolute_url(self):\n return self.url\n \nclass LocalPhoto(models.Model):\n user = models.ForeignKey(User)\n uuid = 
models.CharField(max_length=200)\n title = models.CharField(max_length=100,null=True,blank=True)\n description = models.CharField(max_length=200,null=True,blank=True)\n photo = models.ImageField(upload_to=get_file_path,null=True,blank=True)\n date_uploaded = models.DateTimeField()\n tags = TagField()\n thumbnail = models.ImageField(upload_to=\"thumbnails/\", editable=False)\n\n def save(self):\n if not self.id:\n self.date_uploaded = datetime.datetime.now()\n \n from PIL import Image\n from cStringIO import StringIO\n from django.core.files.uploadedfile import SimpleUploadedFile\n \n # Set our max thumbnail size in a tuple (max width, max height)\n THUMBNAIL_SIZE = (50, 50)\n \n # Open original photo which we want to thumbnail using PIL's Image\n # object\n image = Image.open(settings.MEDIA_ROOT + self.photo.name)\n \n # Convert to RGB if necessary\n # Thanks to Limodou on DjangoSnippets.org\n # http://www.djangosnippets.org/snippets/20/\n if image.mode not in ('L', 'RGB'):\n image = image.convert('RGB')\n \n # We use our PIL Image object to create the thumbnail, which already\n # has a thumbnail() convenience method that contrains proportions.\n # Additionally, we use Image.ANTIALIAS to make the image look better.\n # Without antialiasing the image pattern artifacts may result.\n image.thumbnail(THUMBNAIL_SIZE, Image.ANTIALIAS)\n \n # Save the thumbnail\n temp_handle = StringIO()\n image.save(temp_handle, 'png')\n temp_handle.seek(0)\n # Save to the thumbnail field\n suf = SimpleUploadedFile(os.path.split(self.photo.name)[-1],\n temp_handle.read(), content_type='image/png')\n self.thumbnail.save(suf.name+'.png', suf, save=False)\n\n # Save this photo instance\n super(LocalPhoto, self).save()\n\n def get_absolute_url(self):\n return \"%s%s\" % (settings.LOCAL_PHOTO_URL,self.photo)\n \nclass SuperAlbumPhoto(TrovePhoto):\n trove_photo = models.ForeignKey(TrovePhoto,related_name='superalbum_trove_photo')\n album = models.ForeignKey('SuperAlbum')\n order = models.IntegerField()\n \nclass SuperAlbum(models.Model):\n user = models.ForeignKey(User)\n album_name = models.CharField(max_length=100)\n slug = models.SlugField(max_length=255,unique=True)\n \n def save(self, *args, **kw):\n if not self.slug:\n self.slug = slugify_uniquely(self.album_name,self.__class__)\n super(SuperAlbum, self).save(*args, **kw)\n \n def get_photos(self):\n photos = SuperAlbumPhoto.objects.filter(album=self).order_by('order')\n return photos\n\n" }, { "alpha_fraction": 0.6143369078636169, "alphanum_fraction": 0.6315411925315857, "avg_line_length": 33.900001525878906, "blob_id": "3f6b1550e1904595620fd0f924c1014df041f3a7", "content_id": "ba851f97facbf7d06516eae7544dbeedaff49be6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 1395, "license_type": "permissive", "max_line_length": 330, "num_lines": 40, "path": "/templates/view-album.html", "repo_name": "trove/trove-superalbums", "src_encoding": "UTF-8", "text": "{% extends 'base.html' %}\n\n{% block extra_javascript %}\n<script type=\"text/javascript\">\n // Toggle sharing drawer when Share button is clicked\n $('#album-share').live('click', function(){\n $('#share').slideToggle(180);\n });\n</script>\n{% endblock %}\n{% block content %}\n{% if deleted_album %}\n<p>\nThe album {{ deleted_album }} was deleted.\n</p>\n<p>\n<a href=\"/\">Return Home</a>, or\n</p>\n<p>\n<a href=\"/album\">Create a new album</a>\n</p>\n{% else %}\n<a href=\"delete\" id=\"album-delete\" class=\"button\">delete</a> <a href=\"edit\" 
id=\"album-edit\" class=\"button\">edit</a> <a href=\"javascript:void(0);\" id=\"album-share\" class=\"button\">share</a> \n<h2>{{ album.album_name }} Photos</h2>\n\n<div id=\"share\">\n Share via... <a href=\"http://twitter.com/?status=Check%20out%20my%20photo%20album%20at%20SuperAlbums!%20{{url}}\"><img src=\"http://beta.yourtrove.com/static/images/services-twitter.png\" /></a> <a href=\"http://www.facebook.com/sharer.php?u={{ url }}\"><img src=\"http://beta.yourtrove.com/static/images/services-facebook.png\" /></a>\n</div>\n\n<div id=\"album-thumbnails\" class=\"view\">\n <ul class=\"photo-list\">\n\t{% for photo in album.get_photos %}\n\t<li><a href=\"{{ photo.trove_photo.full_size }}\"><img src=\"{{ photo.trove_photo.thumbnail }}\" id=\"{{ photo.trove_photo.trove_id }}\" width=\"96\" height=\"72\" /></a></li>\n\t{% endfor %}\n </ul>\n </div>\n \n{% endif %}\n<div class=\"clear\"></div>\n{% endblock %}" }, { "alpha_fraction": 0.6251450777053833, "alphanum_fraction": 0.6255319118499756, "avg_line_length": 35.94285583496094, "blob_id": "238dfe17ae66af15ba58941094c309a1a3c7c028", "content_id": "7bc986c397d69ea8a0198afa3e343c9d2d646140", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2585, "license_type": "permissive", "max_line_length": 85, "num_lines": 70, "path": "/core/views.py", "repo_name": "trove/trove-superalbums", "src_encoding": "UTF-8", "text": "# Create your views here.\nfrom django.shortcuts import render_to_response\nfrom django.template.context import RequestContext\nfrom django.contrib.auth import logout\nfrom troveclient import TroveAPI\nfrom troveclient.Objects import Query\nimport datetime\nfrom superalbums.models import SuperAlbum\nfrom django.conf import settings\nfrom django.contrib.auth import authenticate, login\nfrom django.http import HttpResponseRedirect\n\ndef get_started_or_go_home(request):\n template = 'home.html'\n context_vars = {}\n context = None\n user_albums = SuperAlbum.objects.filter(user=request.user)\n if len(user_albums) < 1:\n return HttpResponseRedirect('/album/')\n context_vars['user_albums'] = user_albums\n context = RequestContext(request,context_vars)\n\n return render_to_response(template,context,mimetype=\"text/html\")\n\ndef home(request):\n template = 'welcome.html'\n context_vars = {}\n context = None\n if request.user.is_authenticated():\n return get_started_or_go_home(request)\n else:\n api = TroveAPI(settings.TROVE_APP_KEY, settings.TROVE_APP_SECRET, ['photos'])\n if request.GET:\n try:\n oauth_token_key = request.GET['oauth_token']\n request_token = request.session['trove_request_token']\n oauth_token = api.get_access_token(request_token)\n user = authenticate(oauth_token = oauth_token)\n if user is not None:\n if user.is_active:\n login(request, user)\n return get_started_or_go_home(request)\n else:\n # Return a 'disabled account' error message\n pass\n else:\n # TODO FIXME failed login\n pass\n except KeyError:\n pass\n else:\n try:\n request_token = request.session['trove_request_token']\n except:\n request_token = api.get_request_token()\n request.session['trove_request_token']=request_token\n url = api.get_authorization_url(request_token)\n context_vars['trove_connect_url']=url\n context = RequestContext(request,context_vars)\n\n return render_to_response(template,context,mimetype=\"text/html\")\n\ndef get_main_css(request):\n template = 'css/main.css'\n context = RequestContext(request)\n return render_to_response(template,context,mimetype=\"text/css\")\n\ndef 
logout_view(request):\n logout(request)\n return HttpResponseRedirect('/')" }, { "alpha_fraction": 0.7384988069534302, "alphanum_fraction": 0.7602905631065369, "avg_line_length": 40.400001525878906, "blob_id": "0a28a219e5cec4856c9800136af1332c3cd73657", "content_id": "3387a7d188a75670c277519bcd061972956bac8e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 413, "license_type": "permissive", "max_line_length": 85, "num_lines": 10, "path": "/core/models.py", "repo_name": "trove/trove-superalbums", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.contrib.auth.models import User\n# Create your models here.\n\n\nclass UserProfile(models.Model):\n user = models.OneToOneField(User, 'id') \n trove_user_id = models.CharField(max_length=200,unique=True)\n trove_access_token_key = models.CharField(max_length=200,blank=True,null=True)\n trove_access_token_secret = models.CharField(max_length=200,blank=True,null=True)" } ]
9
dmccoystephenson/slow-printer
https://github.com/dmccoystephenson/slow-printer
edec188f43954e4e8e5890f2f66de70b1f9bb573
a235e304a2fa096102a382139a1e7d81f3068b49
a885841c89cea7f3da59f3c3be770b61023375d7
refs/heads/master
2020-05-25T07:25:22.370530
2017-03-14T08:03:15
2017-03-14T08:03:15
84,921,807
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6569037437438965, "alphanum_fraction": 0.6652719378471375, "avg_line_length": 18.91666603088379, "blob_id": "530f831e99607658c6974e6745926199f0038b97", "content_id": "f5c098b632d159c6c4a40d06b32bc0bbb0e78788", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 239, "license_type": "no_license", "max_line_length": 64, "num_lines": 12, "path": "/main.py", "repo_name": "dmccoystephenson/slow-printer", "src_encoding": "UTF-8", "text": "import sys\nimport time\n\ndef slowprint(string):\n\tfor i in string:\n\t\tsys.stdout.write(i)\n\t\tsys.stdout.flush()\n\t\ttime.sleep(.03)\n\tprint \"\\n\"\n\nif __name__ == \"__main__\": # test if not used in another program\n\tslowprint(\"Welcome to the game!\")\n" } ]
1
bryanpedini/2nd-keyboard-shortcuts
https://github.com/bryanpedini/2nd-keyboard-shortcuts
75ace496300e2343e2480e43c0c416a84195dbd7
e81442b492656e29bb8833a5d9bf6e589480da0a
20e8a087a7f34234fb9fd13d88d8f7efdc78d469
refs/heads/master
2020-06-25T09:35:21.370495
2019-07-28T10:39:50
2019-07-28T10:39:50
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7345132827758789, "alphanum_fraction": 0.752212405204773, "avg_line_length": 17.83333396911621, "blob_id": "9aad2a3ad6c35051703a6dfa229e4b378d513dc3", "content_id": "a4c662fc3ea6ea3ecfa36c17133b1bf1e5993636", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 113, "license_type": "no_license", "max_line_length": 55, "num_lines": 6, "path": "/centos_setup.sh", "repo_name": "bryanpedini/2nd-keyboard-shortcuts", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nsudo yum install -y python-pip python3-pip &> /dev/null\n\npip install keyboard\npip3 install keyboard\n" }, { "alpha_fraction": 0.7345132827758789, "alphanum_fraction": 0.752212405204773, "avg_line_length": 17.83333396911621, "blob_id": "c2968c50b14a5fd46664982cd86cdea1fb0ebb73", "content_id": "d398f507068b43f532747bfae213f79a8361448a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 113, "license_type": "no_license", "max_line_length": 55, "num_lines": 6, "path": "/debian_setup.sh", "repo_name": "bryanpedini/2nd-keyboard-shortcuts", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nsudo apt install -y python-pip python3-pip &> /dev/null\n\npip install keyboard\npip3 install keyboard\n" }, { "alpha_fraction": 0.6258823275566101, "alphanum_fraction": 0.6399999856948853, "avg_line_length": 27.33333396911621, "blob_id": "2c1e9548dac620eeebd5dc84cc748db57a3c8d5c", "content_id": "419c5d0df7ee7c849161909768584879936ff217", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 425, "license_type": "no_license", "max_line_length": 113, "num_lines": 15, "path": "/identify.py", "repo_name": "bryanpedini/2nd-keyboard-shortcuts", "src_encoding": "UTF-8", "text": "import keyboard, sys, time\n\ndef identify ( event ):\n keyfile = open ( '2ndkeyboard', 'w' )\n keyfile.write ( str ( event.device ) )\n keyfile.close ( )\n\nprint ( \"Press any key to identify your second keyboard, don't forget to repeat this process at every reboot! 
\" )\ntime.sleep ( 0.25 )\nHook = keyboard.hook ( identify )\nif str ( sys.version ) [ :1 ] == '3':\n input ( \"\" )\nelse:\n raw_input ( \"\" )\nkeyboard.unhook ( Hook )\n" }, { "alpha_fraction": 0.7207207083702087, "alphanum_fraction": 0.7387387156486511, "avg_line_length": 17.5, "blob_id": "9fea758af830e664929f830c8ecd07c3e04030db", "content_id": "868292b782d62c2da34f124f4f26991299cd3a56", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 111, "license_type": "no_license", "max_line_length": 53, "num_lines": 6, "path": "/arch_setup.sh", "repo_name": "bryanpedini/2nd-keyboard-shortcuts", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nsudo pacman -S -y python-pip python3-pip &> /dev/null\n\npip install keyboard\npip3 install keyboard\n" }, { "alpha_fraction": 0.6842105388641357, "alphanum_fraction": 0.6887871623039246, "avg_line_length": 18.863636016845703, "blob_id": "e9f19de00df4627a4c5083bfdb6251e055117f46", "content_id": "56d6222ec4b5d5a9d3f534ab223015983da56ade", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 437, "license_type": "no_license", "max_line_length": 40, "num_lines": 22, "path": "/main.py", "repo_name": "bryanpedini/2nd-keyboard-shortcuts", "src_encoding": "UTF-8", "text": "import keyboard, time, os.path\n\nsecondKeyboard = None\n\ndef shortcuts ( event ):\n if event.device == secondKeyboard:\n print (\"second keyboard!\")\n\nif not os.path.exists ( '2ndkeyboard' ):\n os.system ( 'identify.py' )\nelse:\n keyfile = open ( '2ndkeyboard' )\n secondKeyboard = keyfile.read ( )\n keyfile.close ( )\n\nkeyboard.hook ( shortcuts )\n\ntry:\n keyboard.wait()\nexcept KeyboardInterrupt:\n keyboard.unhook_all_hotkeys ( )\n exit\n" }, { "alpha_fraction": 0.778761088848114, "alphanum_fraction": 0.7831858396530151, "avg_line_length": 44.20000076293945, "blob_id": "c7133b7b18a8115eb88a0b5d59b450335a33ce9c", "content_id": "9299abee46a21f66cb8600724b620bd688e0200f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 226, "license_type": "no_license", "max_line_length": 120, "num_lines": 5, "path": "/README.md", "repo_name": "bryanpedini/2nd-keyboard-shortcuts", "src_encoding": "UTF-8", "text": "# 2nd Keyboard Shortcuts\n\nCreated in python, based on the `keyboard` pip module, written in python.\n<br>\nThis code can recognize a second keyboard and treat the inputs from it as shortcuts, doing all sort of different things.\n" } ]
6
Tsel/tradenetworktools
https://github.com/Tsel/tradenetworktools
4bdf0a4d6cc837dda64d5b51125e9c3e8218eb38
a48cb05f3f0c116f8f40f561dfe5c5ef7a0adb59
7e24ec8f85546da7c9c38d7aad508ffa0ecad5b3
refs/heads/master
2021-04-27T04:49:10.098913
2018-02-23T07:33:18
2018-02-23T07:33:18
122,586,721
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6027777791023254, "alphanum_fraction": 0.6027777791023254, "avg_line_length": 34.939998626708984, "blob_id": "f4fa19d5f7d3f66c7f5216b136d286e6c43d968a", "content_id": "9086a4536e7025d7f9d1d0be1dae3715096dfa9d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1800, "license_type": "no_license", "max_line_length": 88, "num_lines": 50, "path": "/tradenetworktool.py", "repo_name": "Tsel/tradenetworktools", "src_encoding": "UTF-8", "text": "import pandas as pd\n\nclass TradeNetworkTool:\n \"\"\" Defines specific methods used in the analysis of trade networks \"\"\"\n def __init__(self):\n self.fnedgelist = \"\"\n\n\n def readedgelist(self, fnedgelist, dtypes, dates=None):\n self.fnedgelist = fnedgelist\n \"\"\"\n see also the comment in the from_edgelist method\n\n An edge list is the basic building block of the\n networks used as a model for animal trade between farms.\n\n As the name suggests, an edge list is a list of edges.\n Each line of an edge list corresponds to exactly one edge.\n Each line contains at least the names of the nodes connected\n by an edge and any other attributes of the edge.\n\n The list of edges is called el and the name of the file containing\n the data is called fnedgelist.\n\n The data of an edge list are read from cvs file (comma separated file)\n into a pandas dataframe, because here the possibilities of an extensive\n data processing exist. In addition, a pandas dataframe can be translated\n directly into a networkx graph.\n\n\n reads edgelist form csv and creates pandas dataframe\n scv file needs to have the following column header:\n\n S,T,VOL,\"ZUGA_DATE\",\"MELD_DATE\",MELD_DELAY\n\n all columns except VOL and MELD_DELAY (which are of type int) are of type string\n\n :param fnedgelist:\n \"\"\"\n\n if dates == None:\n return pd.read_csv(fnedgelist, sep=',',\n dtype=dtypes\n )\n\n return pd.read_csv(fnedgelist, sep=',',\n dtype=dtypes,\n parse_dates=dates,\n infer_datetime_format=True\n )\n\n\n\n" } ]
1
chrisowensdev/terminal-kombat
https://github.com/chrisowensdev/terminal-kombat
ecfc21aece8010c514719cc3d21faec59c792f72
db2bb576867d9f0315bb44d53539657e37e120b9
e942fb08a6ddcb2786d14011b158fd7f446722c5
refs/heads/master
2022-12-02T08:46:04.424875
2020-08-18T00:50:03
2020-08-18T00:50:03
285,098,930
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.5568547248840332, "alphanum_fraction": 0.564747154712677, "avg_line_length": 33.908164978027344, "blob_id": "e3359a86355bc2f7e28bd934801bdbfac9857d75", "content_id": "7a28da61941dfe1b946925c2c09303becbb7fae2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3421, "license_type": "no_license", "max_line_length": 126, "num_lines": 98, "path": "/classes.py", "repo_name": "chrisowensdev/terminal-kombat", "src_encoding": "UTF-8", "text": "import random\nimport time\nfrom pygame import mixer\n\nmixer.init()\npunch_se = mixer.Sound(\"audio/Punch.wav\")\nkick_se = mixer.Sound(\"audio/Kick.wav\")\nspecial_se = mixer.Sound(\"audio/rage_of_blades.wav\")\n\n\nclass Character():\n def __init__(self, name, health, punch_power, kick_power, special_power, special_name, defense, sex):\n self.name = name\n self.health = health\n self.punch_power = punch_power\n self.kick_power = kick_power\n self.special_power = special_power\n self.special_name = special_name\n self.defense = defense\n self.sex = sex\n\n def __str__(self):\n return \"\"\"\n Name: %s\n Health: %s\n Punch Power: %s\n Kick Power: %s\n Special Power: %s (%d)\n Defense: %s\n \"\"\" % (self.name, self.health, self.punch_power, self.kick_power, self.special_name, self.special_power, self.defense)\n\n def kick(self, opponent):\n if self.kick_power == \"low\":\n kick = random.randint(1, 3)\n if self.kick_power == \"medium\":\n kick = random.randint(4, 6)\n if self.kick_power == \"high\":\n kick = random.randint(7, 9)\n defense = opponent.add_defense()\n if defense > kick:\n defense = kick\n opponent.health -= (kick - defense)\n mixer.Sound.play(kick_se)\n time.sleep(1)\n print(\"\\n\\n%s kicked for %d damage to %s.\" %\n (self.name, kick, opponent.name))\n print(\"%s blocked with %d defense and has %d health left.\" %\n (opponent.name, defense, opponent.health))\n\n def punch(self, opponent):\n if self.punch_power == \"low\":\n punch = random.randint(1, 3)\n if self.punch_power == \"medium\":\n punch = random.randint(4, 6)\n if self.punch_power == \"high\":\n punch = random.randint(7, 9)\n defense = opponent.add_defense()\n if defense > punch:\n defense = punch\n opponent.health -= (punch - defense)\n mixer.Sound.play(punch_se)\n time.sleep(1)\n print(\"\\n\\n%s punched for %d damage to %s.\" %\n (self.name, punch, opponent.name))\n print(\"%s blocked with %d defense and has %d health left.\" %\n (opponent.name, defense, opponent.health))\n\n def special(self, opponent):\n defense = opponent.add_defense()\n if defense > self.special_power:\n defense = self.special_power\n opponent.health -= (self.special_power - defense)\n mixer.Sound.play(special_se)\n time.sleep(4)\n print(\"\\n\\n%s used %s for %d damage to %s.\" %\n (self.name, self.special_name, self.special_power, opponent.name))\n print(\"%s blocked with %d defense and has %d health left.\" %\n (opponent.name, defense, opponent.health))\n\n def rand_attack(self, opponent):\n random_selection = random.randint(1, 3)\n if random_selection == 1:\n self.punch(opponent)\n if random_selection == 2:\n self.kick(opponent)\n if random_selection == 3:\n self.special(opponent)\n\n def is_alive(self):\n return self.health > 0\n\n def add_defense(self):\n if self.defense == \"low\":\n return random.randint(1, 3)\n if self.defense == \"medium\":\n return random.randint(4, 6)\n if self.defense == \"high\":\n return random.randint(7, 9)\n" }, { "alpha_fraction": 0.7566995620727539, "alphanum_fraction": 0.758109986782074, 
"avg_line_length": 53.53845977783203, "blob_id": "7c15bb8fca0c15050ad758793762ad5c38d777c7", "content_id": "e4d71dff286d8713adce4c5ddf971afab0673f3d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1418, "license_type": "no_license", "max_line_length": 789, "num_lines": 26, "path": "/README.md", "repo_name": "chrisowensdev/terminal-kombat", "src_encoding": "UTF-8", "text": "# TERMINAL KOMBAT - Python Project\n\nTerminal Kombat is a fighting role-playing-game using the python programming language.\n\n![intro_screen](/images/intro_screen.png)\n\n## Team members\n\n- <a href=\"https://github.com/JustinSGardner\">Justin Gardner</a>\n- <a href=\"https://github.com/rynoschni\">Ryan Schniederjan</a>\n- <a href=\"https://github.com/brittani-ericksen\">Brittani Ericksen</a>\n- <a href=\"https://github.com/chrisowensdev\">Chris Owens</a>\n\n## Gameplay\n\nA player chooses a character from a list on 9 characters to battle the remaining 8. The player will then battle an opponent until either the player's health or the opponent's health has reached zero. The player has the option to choose a kick attack, a punch attack, or a special attack which is unique to each character to try and reduce the level of the opponents health. Once the player has taken a turn, the computer opponent will attack the player with a random attack of its own. Both the player and computer opponent have a level of defense against the attacks. Characters have different levels of attacks and defense that are shown at the selection screen between low, medium, and high. If the player survives the battle with the first opponent they will move on to the next round.\n\n![character_selection](/images/character_selection.png)\n\n![story](/images/story.png)\n\n![begin_fight](/images/begin_fight.png)\n\n![victory](/images/victory.png)\n\n![yourdead](/images/youre_dead.png)\n" }, { "alpha_fraction": 0.5468106865882874, "alphanum_fraction": 0.5599279999732971, "avg_line_length": 25.813793182373047, "blob_id": "52d26f291933c52e15da54125e05835b2d313922", "content_id": "5d0eb2e0b9e7b16c2495ea915045d99653d6956b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3888, "license_type": "no_license", "max_line_length": 108, "num_lines": 145, "path": "/main.py", "repo_name": "chrisowensdev/terminal-kombat", "src_encoding": "UTF-8", "text": "import random\nimport time\nfrom text import story, title, fight_text, game_over, fatality, loading, ending_story, choose\nfrom classes import Character\nfrom functions import player_selection, ending, character_list, sound, player_defeated, opponent_dead_action\nimport sys\n\n# Music\nfrom pygame import mixer\nmixer.init()\nmixer.music.load(\"audio/TK_Intro_2.wav\")\nmixer.music.play(-1)\n\n\ndef keep_playing():\n while len(opponent_list) >= 1:\n keep_playing = input(\"Do you want to keep fighting? 
(y or n) \")\n if keep_playing == 'y':\n player.health = 50\n print(\"\\nYou have absorbed power from your opponent!\")\n print(\"You're back to Full Health: %d \\n\" % (player.health))\n print(\"Your next opponent is: %s\" % (opponent_list[0]))\n play_list.pop(0)\n\n for num in play_list:\n if num == play_list[0]:\n sound(\"round\" + num + \".wav\")\n\n time.sleep(2)\n fight_text()\n sound(\"fight.wav\")\n fight()\n elif keep_playing == 'n':\n print(\"Quitters never win!\\n\\n\")\n sound(\"laugh.wav\")\n game_over()\n sys.exit(0)\n else:\n sound(\"gong.wav\")\n print(\"Typing is hard, yo!\\n\")\n\n\ndef attack(type, opponent_list):\n # Player attacks opponent\n type(opponent_list[0])\n # mixer.Sound.play(special_se)\n time.sleep(1.5)\n if opponent_list[0].is_alive() == False:\n opponent_dead_action(opponent_list)\n if len(opponent_list) >= 1:\n keep_playing()\n play_list.pop(0)\n else:\n ending()\n\n\n# Game Fight function\ndef fight():\n\n while opponent_list[0].health > 0 and player.health > 0:\n if opponent_list[0].health < 15:\n if opponent_list[0].sex == \"F\":\n sound(\"finish_her.wav\")\n else:\n sound(\"finish_him.wav\")\n print(\"\\nWhat do you want to do?\")\n print(\"1. Kick\")\n print(\"2. Punch\")\n print(\"3. %s\" % (player.special_name))\n print(\"4. Flee\")\n print(\">>> \",)\n user_input = input()\n# Kick\n if user_input == \"1\":\n attack(player.kick, opponent_list)\n# Punch\n elif user_input == \"2\":\n attack(player.punch, opponent_list)\n# Special\n elif user_input == \"3\":\n attack(player.special, opponent_list)\n\n# RUN AWAY!!!!\n elif user_input == \"4\":\n print(\"QUITTERS NEVER WIN!\")\n sound(\"laugh.wav\")\n time.sleep(3)\n sys.exit(0)\n else:\n sound(\"gong.wav\")\n print(\n \"Your keyboard skills need some work! You missed your chance to attack!\\n\")\n time.sleep(1.5)\n# Computer ATTACKS!\n if player.health > 0:\n # Opponent attacks player\n opponent_list[0].rand_attack(player)\n if player.is_alive() == False:\n player_defeated(player)\n\n\n # print title screen\ntitle()\n\n\ntime.sleep(2)\nsound(\"gong.wav\")\ninput(\"Press enter to continue\\n \\n \\n\")\n\nchoose()\nplayer = player_selection()\ncharacter_list = character_list()\nopponent_list = []\nfor character in character_list:\n if player != character:\n opponent_list.append(character)\n\n\nplay_list = ['1', '2', '3', '4', '5', '6', '7', '8']\n\n# when user selects a character, it moves remaining characters to opponents list for battle\n\n\nprint(\"You have choosen %s\" % (player))\n\n\nsound(\"excellent.wav\")\nprint()\ntime.sleep(1)\nstory()\ntime.sleep(3)\nsound(\"test_your_luck.wav\")\nready = input(\"\\nAre you ready to fight? (y or n) \")\nif ready == \"y\":\n print(\"\\nGET READY!\\n\")\n sound(\"round1.wav\")\nelse:\n print(\"\\nToo bad! 
Time to fight!\\n\")\n sound(\"laugh.wav\")\n\nprint(\"\\nYour first opponent is: %s\" % (opponent_list[0]))\ntime.sleep(2)\nfight_text()\nsound(\"fight.wav\")\nfight()\n" }, { "alpha_fraction": 0.4960901439189911, "alphanum_fraction": 0.5381784439086914, "avg_line_length": 32.96875, "blob_id": "10ffb0f6ad587505e68b3874a772fabeef15a611", "content_id": "0fd6bae9c6d9efbdfadf035d2f7de4111d258699", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4348, "license_type": "no_license", "max_line_length": 119, "num_lines": 128, "path": "/functions.py", "repo_name": "chrisowensdev/terminal-kombat", "src_encoding": "UTF-8", "text": "import time\nimport sys\nfrom pygame import mixer\nfrom classes import Character\nfrom text import victory, ending_story, fatality\n\n\nmixer.init()\n\n\n# Character Functions and Definitions\n\ncharacter1 = Character(\"K. Relly\", 50, \"high\", \"high\",\n 30, \"Acid Drool\", \"low\", \"M\")\ncharacter2 = Character(\"Charg'n Ryno\", 50, \"medium\", \"low\",\n 30, \"Gor'n Horn Of Pain\", \"medium\", \"M\")\ncharacter3 = Character(\"NeckBreakin Brit\", 50, \"low\",\n \"high\", 30, \"Roundhouse Kick To The Face\", \"high\", \"F\")\ncharacter4 = Character(\"Snake Jodgel\", 50, \"high\",\n \"medium\", 30, \"Eye Gouge\", \"low\", \"M\")\ncharacter5 = Character(\"Ron Sheid\", 50, \"low\", \"low\",\n 30, \"Bitch Slap\", \"high\", \"M\")\ncharacter6 = Character(\"Justin\", 50, \"high\", \"low\", 30,\n \"Words Of Fury\", \"medium\", \"M\")\ncharacter7 = Character(\"Cave Dolòn\", 50, \"high\", \"low\",\n 30, \"Nutcracker Choke\", \"high\", \"M\")\ncharacter8 = Character(\"Crazyeyes Chris\", 50, \"high\",\n \"medium\", 30, \"Stare Of Death\", \"medium\", \"M\")\ncharacter9 = Character(\"Yelrac Zil\", 50, \"high\", \"high\",\n 30, \"Teleport & Attack From Behind\", \"high\", \"F\")\n\n\ndef character_list():\n return [character1, character2, character3, character4, character5, character6, character7, character8, character9]\n\n\ndef print_character_menu(pos1, char1, pos2, char2, pos3, char3):\n print(\"-\" * 110)\n print(\"{:<10}|| {:<30}|| {:<30}|| {:<30}|\".format(\n \"Name:\", (pos1 + char1.name), (pos2 + char2.name), (pos3 + char3.name)))\n print(\"{:<10}|| {:<30}|| {:<30}|| {:<30}|\".format(\n \"Health:\", char1.health, char2.health, char3.health))\n print(\"{:<10}|| {:<30}|| {:<30}|| {:<30}|\".format(\n \"Punch:\", char1.punch_power.title(), char2.punch_power.title(), char3.punch_power.title()))\n print(\"{:<10}|| {:<30}|| {:<30}|| {:<30}|\".format(\n \"Kick:\", char1.kick_power.title(), char2.kick_power.title(), char3.kick_power.title()))\n print(\"{:<10}|| {:<30}|| {:<30}|| {:<30}|\".format(\n \"Defense:\", char1.defense.title(), char2.defense.title(), char3.defense.title()))\n print(\"{:<10}|| {:<30}|| {:<30}|| {:<30}|\".format(\n \"Special:\", char1.special_name, char2.special_name, char3.special_name))\n print(\"-\" * 110)\n\n\ndef player_selection():\n print_character_menu(\"1. \", character1, \"2. \",\n character2, \"3. \", character3)\n print_character_menu(\"4. \", character4, \"5. \",\n character5, \"6. \", character6)\n print_character_menu(\"7. \", character7, \"8. \",\n character8, \"9. \", character9)\n\n# Looping user input to choose character\n while True:\n character_choice = input(\"Who will it be? 
(1-9) \")\n if character_choice == \"1\":\n player = character1\n return player\n elif character_choice == \"2\":\n player = character2\n return player\n elif character_choice == \"3\":\n player = character3\n return player\n elif character_choice == \"4\":\n player = character4\n return player\n elif character_choice == \"5\":\n player = character5\n return player\n elif character_choice == \"6\":\n player = character6\n return player\n elif character_choice == \"7\":\n player = character7\n return player\n elif character_choice == \"8\":\n player = character8\n return player\n elif character_choice == \"9\":\n player = character9\n return player\n else:\n sound(\"gong.wav\")\n print(\"Typing is hard, yo!\")\n\n\n# Gameplay Functions\n\ndef sound(file):\n sound = mixer.Sound(\"audio/%s\" % file)\n return mixer.Sound.play(sound)\n\n\ndef opponent_dead_action(opponent_list):\n print(\"%s is dead.\\n\" % (opponent_list[0].name))\n print(\"\")\n victory()\n sound(\"impressive.wav\")\n opponent_list.pop(0)\n\n\ndef player_defeated(player):\n print(\"%s is dead.\\n\\n\" % (player.name))\n fatality()\n sound(\"fatality.wav\")\n time.sleep(1.5)\n print(\"Better luck next time, chump.\")\n sound(\"laugh.wav\")\n time.sleep(5)\n sys.exit(0)\n\n\ndef ending():\n time.sleep(1.5)\n ending_story()\n sound(\"flawless_victory.wav\")\n time.sleep(5)\n sys.exit(0)\n" } ]
4
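The functions.py record above selects the player's fighter with a nine-branch if/elif chain that maps the strings "1" through "9" onto the pre-built Character objects. As an illustration only, and not part of the repository, the same numbered menu can be driven by indexing into the roster list; in the standalone sketch below the roster of plain name strings stands in for the Character objects.

# Illustrative sketch: numbered-menu selection by list index instead of a nine-branch if/elif chain.
# The roster list is a stand-in for the Character objects built in functions.py.
roster = ["K. Relly", "Charg'n Ryno", "NeckBreakin Brit", "Snake Jodgel",
          "Ron Sheid", "Justin", "Cave Dolòn", "Crazyeyes Chris", "Yelrac Zil"]

def pick_from_menu(choice, options):
    """Return the option for a 1-based menu choice, or None if the input is invalid."""
    if choice.isdigit() and 1 <= int(choice) <= len(options):
        return options[int(choice) - 1]
    return None

selection = None
while selection is None:
    selection = pick_from_menu(input("Who will it be? (1-9) "), roster)
    if selection is None:
        print("Typing is hard, yo!")  # same retry message as the original menu
print("You chose:", selection)

Indexing keeps the menu logic in one place, so adding a tenth fighter only means appending to the list.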
thomasmbranch/dnd
https://github.com/thomasmbranch/dnd
15aaff88bc37cfb8bddd91293d9a67a39d653364
c54c0f206a8469cf9b82dfb1ad7d4ad0cf7d4077
778481bbd640f45e7b53d67a1a38b5473c416fc1
refs/heads/master
2022-08-24T22:22:26.642506
2016-12-09T18:13:15
2016-12-09T18:13:15
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5470386743545532, "alphanum_fraction": 0.5621634721755981, "avg_line_length": 43.80717468261719, "blob_id": "4d688bf56930e52fa74b261b2c4ab2141ea66b57", "content_id": "299e47fbed4879eb86ee846189a3a40824ed3dae", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 20430, "license_type": "permissive", "max_line_length": 120, "num_lines": 446, "path": "/dnd.py", "repo_name": "thomasmbranch/dnd", "src_encoding": "UTF-8", "text": "############################################\r\n# Thomas Lehman-Borer & Rachel Chamberlain #\r\n# Dungeons and Dragons Facilitator #\r\n# CS 1 Final Project #\r\n############################################\r\n\r\nfrom random import shuffle\r\n\r\nchars = {}\r\nmonst = {}\r\n\r\nclass SentientBeing:\r\n '''This class is a parent class to Character and Monster. It holds simple methods common to both subclasses,\r\n such as basic getters for health and experience, and basic setters like changing health and armor. In addition,\r\n this class has the methods minForHit and attack, where the former is only used in the latter and thus is private.'''\r\n ### CONSTRUCTOR ###\r\n def __init__(self, name, experience, health, species, attacks, armor):\r\n '''Initialize common attributes and create a dictionary of functions\r\n that can be used in the combat function (near the end of the file).'''\r\n \r\n self.__name = name # string\r\n self.__experience = experience # float\r\n self.__health = health # list with two items -> [current, max]\r\n self.__species = species # string\r\n self.attacks = attacks # dictionary\r\n self.__armor = armor # integer\r\n self.combatDict = {'attack': self.attack,\r\n 'changeHealth': self.changeHealth,\r\n 'getHealth': self.getHealth,\r\n 'setMaxHealth': self.setMaxHealth,\r\n 'getArmor': self.getArmor,\r\n 'setArmor': self.setArmor,\r\n 'getExp': self.getExp,\r\n 'addExp': self.addExp,\r\n 'getName': self.getName,\r\n 'getSpecies': self.getSpecies}\r\n\r\n ### GETTERS ###\r\n # These are the simple getters of attributes instances of the sentient being class.\r\n def getHealth(self):\r\n return self.__health\r\n\r\n def getArmor(self):\r\n return self.__armor\r\n\r\n def getExp(self):\r\n return self.__experience\r\n\r\n def getSpecies(self):\r\n return self.__species\r\n\r\n def getName(self):\r\n return self.__name\r\n\r\n ### SETTERS ###\r\n # These are setter methods to modify attributes of the SentientBeing (SB) instances.\r\n def changeHealth(self, change):\r\n ''' This method takes the parameter change which is then added to the current health of the being.\r\n To decrease the current health, simply enter a negative integer.'''\r\n current = self.__health[0]\r\n maximum = self.__health[1]\r\n # Define if/else statements to make sure the health doesn't go below zero or above the max\r\n if current + int(change) < 0:\r\n self.__health[0] = 0\r\n elif current + int(change) > maximum:\r\n self.__health[0] = maximum\r\n else:\r\n self.__health[0] += int(change)\r\n\r\n def setMaxHealth(self, val):\r\n '''This method changes the max amount of health a being can have and decreases the current\r\n if it becomes greater than the max when changing the max.'''\r\n self.__health[1] = int(val)\r\n if self.__health[0] > self.__health[1]:\r\n self.__health[0] = self.__health[1]\r\n\r\n def addExp(self, change):\r\n '''This method adds to the experience, which is a float'''\r\n self.__experience += float(change)\r\n\r\n def setArmor(self, newArm):\r\n '''This method sets the 
armor class of the being.\r\n Note: smaller values for the armor class equates to better armor.'''\r\n if newArm > 0 and newArm < 10:\r\n self.__armor = newArm\r\n else: # else statement to make sure the armor class is in the right range.\r\n print(\"You must enter an integer from 1 to 9\")\r\n\r\n ### OTHERS ###\r\n def __str__(self):\r\n '''Overload function so that printing the SB instance gives only the name\r\n (and not a gibberish pointer).'''\r\n return self.__name\r\n\r\n def __bool__(self):\r\n '''This overload is mainly for knowing when a SB instance is dead (current health = 0).\r\n It is used in combat to know when to remove them from the appropriate lists/dictionary.'''\r\n if self.__health[0] == 0:\r\n return False\r\n else:\r\n return True\r\n\r\n def __minForHit(self, being, attRoll):\r\n '''This private function determines what the minimum die roll is in order to be able to hit the\r\n SB. This is its own function because we need to calculate this with armor (for all SB) and level for\r\n Characters and experience for Monsters. This function starts as an if/else statement to know which\r\n calculation method to use.'''\r\n if isinstance(self, Character):\r\n dArmor = being.getArmor()\r\n # good for character levels 1-3; information for higher levels to come later\r\n # armor : minimum roll\r\n table = {9: 10, 8: 11, 7: 12, 6: 13, 5: 14, 4: 15, 3: 16, 2: 17}\r\n\r\n return table[dArmor] # returns the min roll value for the character\r\n\r\n elif isinstance(self, Monster):\r\n dArmor = being.getArmor()\r\n\r\n # splitting something formatted like '3d6 + 1' into [[3,6],1]\r\n # works without modifier or with negative modifier\r\n if '+' not in attRoll and '-' not in attRoll:\r\n attRoll += '+0'\r\n attRoll = attRoll.split('+')\r\n elif '+' in attRoll:\r\n attRoll = attRoll.split('+')\r\n elif '-' in attRoll:\r\n attRoll = attRoll.split('-')\r\n attRoll[1] = int(attRoll[1])\r\n attRoll[0] = attRoll[0].split('d')\r\n attRoll[0][0] = int(attRoll[0][0])\r\n attRoll[0][1] = int(attRoll[0][1])\r\n\r\n numDice = attRoll[0][0]\r\n\r\n if numDice >= 11:\r\n table = {9: 0, 8: 1, 7: 2, 6: 3, 5: 4, 4: 5, 3: 6, 2: 7}\r\n elif numDice in [9, 10]:\r\n table = {9: 2, 8: 3, 7: 4, 6: 5, 5: 6, 4: 7, 3: 8, 2: 9}\r\n elif numDice in [7, 8]:\r\n table = {9: 4, 8: 5, 7: 6, 6: 7, 5: 8, 4: 9, 3: 10, 2: 11}\r\n elif (numDice in [5, 6]) or (numDice == 4 and attRoll[1] > 0):\r\n table = {9: 5, 8: 6, 7: 7, 6: 8, 5: 9, 4: 10, 3: 11, 2: 12}\r\n elif numDice == 4 or (numDice == 3 and attRoll[1] > 0):\r\n table = {9: 6, 8: 7, 7: 8, 6: 9, 5: 10, 4: 11, 3: 12, 2: 13}\r\n elif numDice == 3 or (numDice == 2 and attRoll[1] > 0):\r\n table = {9: 8, 8: 9, 7: 10, 6: 11, 5: 12, 4: 13, 3: 14, 2: 15}\r\n elif numDice == 2 or attRoll[1] > 1:\r\n table = {9: 9, 8: 10, 7: 11, 6: 12, 5: 13, 4: 14, 3: 15, 2: 16}\r\n else:\r\n table = {9: 10, 8: 11, 7: 12, 6: 13, 5: 14, 4: 15, 3: 16, 2: 17}\r\n\r\n return table[dArmor] # # returns the min roll value for the monster\r\n\r\n def attack(self, being):\r\n '''This method is how characters attack other characters. It takes the param of the being to be attacked\r\n (being, \"the target\") and used attacks info from the attacker (self) to ask the user what attack to use.\r\n The user gets to roll the die, but the function tells the user how they should calculate the damage\r\n done by the attack and the user just tells the function what the result is. 
This function tells you if an\r\n attack hits the target and, if it does, it deducts from the health automatically.'''\r\n\r\n # If statement to make sure that being, not an instance of a SentientBeing subclass,\r\n # becomes one or is labeled as not attackable. When using combat, a string is passed as being\r\n # so we must account for that by pulling the value from the key's respective dictionary.\r\n if not isinstance(being, SentientBeing):\r\n if being in chars:\r\n being = chars[being]\r\n elif being in monst:\r\n being = monst[being]\r\n else:\r\n print(\"This is not a valid being to attack\")\r\n\r\n # Interact with the user by asking which attack in the attacker's (self's) attack dictionary\r\n # the user wants to use. We made it possible for the user to only type a partial string and\r\n # get the attack from there. If there are more than one attacks with the partial string, it will\r\n # ask the user to enter the full name.\r\n print('What is the attack of choice?')\r\n print(self.attacks)\r\n possibilities = 0\r\n attack = input(' >> ')\r\n while attack not in self.attacks:\r\n fullName = ''\r\n for a in self.attacks:\r\n if attack in a:\r\n possibilities += 1\r\n fullName = a\r\n if possibilities == 1:\r\n attack = fullName\r\n elif possibilities > 1:\r\n print('Which attack did you mean?')\r\n attack = input(' >> ')\r\n else:\r\n print('That is not an available attack.')\r\n attack = input(' >> ')\r\n\r\n # The user enters the results of the hit die, or if the attack will land on the target.\r\n hitDie = int(input('What is the result of a 1d20 roll? '))\r\n\r\n # If it does, then ask what the results of the damage roll is and change the health of the\r\n # target accordingly.\r\n if self.__minForHit(being, self.attacks[attack]) <= hitDie:\r\n attDie = int(input('What is the result of a ' +\r\n self.attacks[attack] + ' roll? 
'))\r\n being.changeHealth(-attDie)\r\n\r\n if being.getHealth()[0] != 0:\r\n print('The health of', being.getName(), 'is now', being.getHealth())\r\n else:\r\n print('You have slain', being.getName() + '.')\r\n # case for the target evading\r\n elif hitDie < 10:\r\n print(being.getName(), 'evades the attack.')\r\n # Case for the armor blocking the attack.\r\n else:\r\n print(\"The attack is blocked by the defender's armor.\")\r\n\r\n########################################################################################################################\r\n\r\nclass Character(SentientBeing):\r\n '''This is a subclass of SentientBeing which adds attributes and methods which aren't used in Monster.\r\n It all the methods of its parent class and adds attributes and getters/setters for money and level\r\n and now the character has a player name.'''\r\n ### CONSTRUCTOR ###\r\n def __init__(self, name, player, level, experience, health, species, armor, money, attacks):\r\n self.__player = player\r\n self.__level = level\r\n self.__money = money\r\n super().__init__(name, experience, health, species, attacks, armor)\r\n self.combatDict['playerName'] = self.playerName\r\n self.combatDict['getLevel'] = self.getLevel\r\n self.combatDict['lvlUp'] = self.lvlUp\r\n\r\n if name not in chars:\r\n chars[name] = self\r\n\r\n ### GETTERS ###\r\n # the basic getters for the subclass\r\n def getMoney(self):\r\n # returns the list of the money to the terminal\r\n return self.__money\r\n\r\n def playerName(self):\r\n return self.__player\r\n\r\n def getLevel(self):\r\n return self.__level\r\n\r\n ### SETTERS ###\r\n def lvlUp(self):\r\n print('Any level-dependent attacks must be changed manually.')\r\n self.__level += 1\r\n\r\n########################################################################################################################\r\n\r\nclass Monster(SentientBeing):\r\n '''This is a subclass of SentientBeing and has no unique attributes but it's easier to keep track of being types \r\n with the additonal subclass. Future plans include subclasses of Monster.'''\r\n ### CONSTRUCTOR ###\r\n def __init__(self, name, experience, health, species, attacks, armor):\r\n super().__init__(name, experience, health, species, attacks, armor)\r\n\r\n if name not in monst:\r\n monst[name] = self\r\n\r\n########################################################################################################################\r\n\r\ndef newChar():\r\n '''newChar is a function to make a new character in the game. It asks you step-by-step for attributes and\r\n constructs the Character object based on responses. Attacks will be added directly the attacks dictionary\r\n for the Character. This function returns the Character object, so use [name] = newChar() to get\r\n your new character, where [name] is replaced with the actual name of the character.'''\r\n \r\n print(\"Begin new character construction.\\n\")\r\n\r\n name = input('What is the character name? ').replace(\" \", \"\")\r\n player = input(\"What is the player name? \")\r\n\r\n level = int(input(\"What level is the character? \"))\r\n experience = float(input(\"How much experience does the character have? \"))\r\n health1 = int(input(\"What is the character's max health? \"))\r\n health0 = int(input(\"What is the character's current health? \"))\r\n health = [health0, health1]\r\n\r\n species = input(\"What species is the character? \")\r\n armor = int(input(\"What is the character's armor class? 
\"))\r\n\r\n print('How much of each of these monetary denominations does the character have?')\r\n plat = input('Platinum: ')\r\n gold = input('Gold: ')\r\n silv = input('Silver: ')\r\n copp = input('Copper: ')\r\n elec = input('Electrum: ')\r\n money = [int(plat), int(gold), int(silv), int(copp), int(elec)]\r\n\r\n attacks = {}\r\n print(\"\\nYou will need to set your attacks separately.\")\r\n return Character(name, player, level, experience, health, species, armor, money, attacks)\r\n\r\n\r\ndef newMonster():\r\n '''This function is very similar to newChar only it makes Monster objects. It is designed and used in the \r\n same way (user input for each attribute and the function constructs and returns the Monster object.'''\r\n \r\n print(\"Begin new monster construction.\\n\")\r\n name = input('What shall we call the monster? ').replace(\" \", \"\")\r\n species = input(\"What is the species of this monster? \")\r\n experience = float(input(\"What is the experience of this monster? \"))\r\n health1 = int(input(\"What is the max health of this monster? \"))\r\n health0 = health1\r\n health = [health0, health1]\r\n armor = int(input(\"What armor class does the monster have? \"))\r\n attacks = {}\r\n print(\"\\nYou will need to set the monsters attacks separately.\")\r\n return Monster(name, experience, health, species, attacks, armor)\r\n\r\n\r\ndef save(chars):\r\n '''This function saves current Character data to a .txt file. It writes one Character object per line\r\n and separates the attributes with a colon (':'). The load function below reads the save file format.'''\r\n filename = input('Filename: ')\r\n fh = open(filename, 'w')\r\n\r\n fh.write('CHARS\\n')\r\n for char in chars.values():\r\n attackKeys = ''\r\n attackVals = ''\r\n for key in char.attacks:\r\n attackKeys += key + ','\r\n attackVals += char.attacks[key] + ','\r\n attacks = attackKeys[:-1] + ';' + attackVals[:-1]\r\n\r\n attributes = [str(char), char.playerName(), str(char.getLevel()),\r\n str(char.getExp()), str(char.getHealth())[1:-1],\r\n char.getSpecies(), str(char.getArmor()),\r\n str(char.getMoney())[1:-1], attacks]\r\n fh.write(':'.join(attributes) + '\\n')\r\n\r\n fh.write('ENDCHARS\\n')\r\n\r\n fh.close()\r\n print('Character data saved.')\r\n\r\n\r\ndef load():\r\n \"\"\"This function reads in from a save file (.txt) and returns a dictionary of character\r\n objects. For ease of use, user should say 'chars = load()'. 
Once the file is loaded, the user will have\r\n to set each character object to a variable(ie: >>> bernie = chars['Bernie'])\"\"\"\r\n characters = {}\r\n\r\n filename = input('Filename: ')\r\n fh = open(filename, 'r')\r\n\r\n fh.readline() # first line of file(CHARS)\r\n\r\n line = fh.readline().strip(\"\\n\") # reads the first character and constructs it in the while loop\r\n while 'ENDCHARS' not in line: # for each character\r\n # formatting into desired types\r\n args = line.split(':')\r\n args[2] = int(args[2]) # level\r\n args[3] = float(args[3]) # experience\r\n\r\n args[4] = args[4].split(',') # health\r\n for i in range(len(args[4])):\r\n args[4][i] = int(args[4][i])\r\n\r\n args[6] = int(args[6]) # armor\r\n\r\n args[7] = args[7].split(',') # money\r\n for i in range(len(args[7])):\r\n args[7][i] = int(args[7][i])\r\n\r\n attacks = args[8].split(';') # attacks dictionary\r\n attackKeys = attacks[0].split(',')\r\n attackVals = attacks[1].split(',')\r\n args[8] = dict(zip(attackKeys, attackVals))\r\n\r\n # remove the occasional extra empty key\r\n if '' in args[8]:\r\n del args[8]['']\r\n\r\n # add the Character name to the dictionary as the key and the Character object as the value.\r\n characters[args[0]] = Character(*args)\r\n # Read the next line to either add another character to the dictionary or stop the while loop\r\n # if the line is \"ENDCHARS\"\r\n line = fh.readline().strip(\"\\n\")\r\n\r\n fh.close()\r\n return characters # return the dictionary\r\n\r\n\r\ndef combat(Chars, Monst):\r\n '''This function was the biggest pain in the butt. It takes the parameters of the character list chars \r\n and the monster list monst and randomly makes an order in which the combatants get their turn.\r\n During their turn, they can choose from a number of functions in the combatDict defined in SB.\r\n type \"next\" to end the turn and otherwise type the function you want to execute. If the function\r\n takes parameters, just type a space between the funciton and the parameter and the combat function interprets.\r\n Current note: even if one side is totally dead, because of the for loop, it must finish going through the\r\n combatants before a winner is declared. (Maybe not still the case??)'''\r\n \r\n charList = list(Chars.values())\r\n monsList = list(Monst.values())\r\n\r\n combatants = charList + monsList\r\n\r\n shuffle(combatants) # shuffle the list for a random order\r\n\r\n print(\"\\nThe order is:\")\r\n for com in combatants:\r\n print('\\t' + str(com)) # print the order of the turns\r\n\r\n # Make a while loop to keep going until one list (monsList or charList) is empty.\r\n while monsList != [] and charList != []:\r\n for com in combatants: # use a for loop to go through each SB instance in the combatants list.\r\n print('\\n' + str(com).upper())\r\n\r\n continuing = True\r\n while continuing: # make a while loop to keep the user's turn going until they type 'next'\r\n action = input(\"What does the combatant do? 
When done with turn, type 'next' to continue.\\n >> \")\r\n functs = action.split()\r\n if functs[0] == 'next':\r\n continuing = False\r\n elif functs[0] not in com.combatDict:\r\n print(\"This is not a valid action.\")\r\n elif len(functs) > 1:\r\n com.combatDict[functs[0]](*functs[1:]) # execute the function with parameters\r\n else: # if len(functs) == 1\r\n if 'get' == functs[0][:3]: # execute the getter functions without parameters and print the return\r\n print(com.combatDict[functs[0]]())\r\n else:\r\n com.combatDict[functs[0]]() # execute the functions without parameters.\r\n\r\n # When something dies in combat, it's off all lists.\r\n for char in combatants:\r\n if not bool(char):\r\n combatants.remove(char)\r\n if char in charList:\r\n charList.remove(char)\r\n del chars[char.getName()]\r\n elif char in monsList:\r\n monsList.remove(char)\r\n del monst[char.getName()]\r\n\r\n # when one list is empty, it declared a winner.\r\n if charList == []:\r\n print(\"Battle is over. The winner is the monsters.\")\r\n elif monsList == []:\r\n print(\"Battle is over. The winner is the characters.\")\r\n" } ]
1
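The save() and load() docstrings in the dnd.py record above describe the on-disk format: one character per line, attributes joined with ':', and the attacks dictionary flattened into comma-separated "names;dice" lists. The standalone sketch below only illustrates that attacks encoding; the attack names and dice values are invented sample data, not taken from the repository.

# Illustrative round-trip of the "keys;values" attacks encoding described in save()/load().
def encode_attacks(attacks):
    """Flatten {'Slash': '1d8', 'Bite': '2d4'} into 'Slash,Bite;1d8,2d4'."""
    return ",".join(attacks.keys()) + ";" + ",".join(attacks.values())

def decode_attacks(text):
    """Rebuild the dictionary from the 'keys;values' form."""
    keys, values = text.split(";")
    return dict(zip(keys.split(","), values.split(",")))

sample = {"Slash": "1d8", "Bite": "2d4"}
encoded = encode_attacks(sample)
assert decode_attacks(encoded) == sample
print(encoded)  # Slash,Bite;1d8,2d4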
HiasobiUtsuro/Laba01
https://github.com/HiasobiUtsuro/Laba01
6d893987a12e88a2863942c54515cb0a22b955af
7bcfc0b63e3ee5c917b8a6e33898da78cacec6b9
6a66ce3704cfd41a978139e918fc0abaf964fb02
refs/heads/main
2023-08-22T17:33:03.025101
2021-10-14T16:10:18
2021-10-14T16:10:18
417,196,069
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5735294222831726, "alphanum_fraction": 0.5735294222831726, "avg_line_length": 33, "blob_id": "bef7c56cf03e62faa2c5f624d80d526ed29c65ae", "content_id": "1570139fb14ed0ac1b12bf0c293eb8bb54bc6261", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 82, "license_type": "no_license", "max_line_length": 34, "num_lines": 2, "path": "/main.py", "repo_name": "HiasobiUtsuro/Laba01", "src_encoding": "UTF-8", "text": "A = input (\"Введите ваше имя: \", )\nprint ('Hello, my name is ' + A)\n" } ]
1
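The two-line main.py stored above prompts in Russian: "Введите ваше имя: " means "Enter your name: ". For readers who want an English rendering, the sketch below is an equivalent rewording for illustration only, not a change to the stored file.

# English-language equivalent of the stored two-line script (illustration only).
name = input("Enter your name: ")
print("Hello, my name is " + name)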
shainaboover/Lambdata_sboov
https://github.com/shainaboover/Lambdata_sboov
72a5bae0716f1e4892eed13e2cd01c1253c4ef8f
894984c5ca8a0053aa2a7dcdda3e79d0563d9c11
82238001361b1bccadc2390afa8768c94eb253e9
refs/heads/main
2023-01-12T07:19:36.228223
2020-11-11T19:52:42
2020-11-11T19:52:42
307,878,673
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.8309859037399292, "alphanum_fraction": 0.8309859037399292, "avg_line_length": 34.5, "blob_id": "a1494e36b2b79d715ed40f72b454404e1dd59b86", "content_id": "553c9256b543b492d8e565fd30f41d468209f27f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 71, "license_type": "permissive", "max_line_length": 53, "num_lines": 2, "path": "/README.md", "repo_name": "shainaboover/Lambdata_sboov", "src_encoding": "UTF-8", "text": "# Lambdata_sboov\nA collection of useful data science utility functions\n" }, { "alpha_fraction": 0.6699029207229614, "alphanum_fraction": 0.708737850189209, "avg_line_length": 19.399999618530273, "blob_id": "710a4545610cf4079d852a0879e44effd4c932c3", "content_id": "bc40de9e839b3c2da6a428d26a3aacf24a58a1b6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 103, "license_type": "permissive", "max_line_length": 31, "num_lines": 5, "path": "/lambdata_sboov/df_utils.py", "repo_name": "shainaboover/Lambdata_sboov", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\n\nONES = pd.Series(np.ones(10))\nZEROS = pd.Series(np.zeros(20))\n\n" }, { "alpha_fraction": 0.7223974466323853, "alphanum_fraction": 0.7318611741065979, "avg_line_length": 23.384614944458008, "blob_id": "fa6f0aef628f6a08f05ad94398a062a563952597", "content_id": "61ab266b1cf3c39a53ab9b84ec059f3ed97067b2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 317, "license_type": "permissive", "max_line_length": 63, "num_lines": 13, "path": "/Dockerfile", "repo_name": "shainaboover/Lambdata_sboov", "src_encoding": "UTF-8", "text": "FROM debian\n\n### So logging/io works reliably w/Docker\nENV PYTHONUNBUFFERED=1\n\n### Basic Python dev dependencies \nRUN apt-get update && \\\napt-get upgrade -y && \\\napt-get install python3-pip curl -y && \\\npip3 install pipenv\n\n### Install Lambdata package\nRUN pip install -i https://test.pypi.org/simple/ lambdata-sboov\n" }, { "alpha_fraction": 0.5791129469871521, "alphanum_fraction": 0.5816294550895691, "avg_line_length": 33.55434799194336, "blob_id": "763cbce14cfa201980c241fbb1a9e6489d5b29eb", "content_id": "9b5a187b0271610fdfd11f33f8bcd626981d850c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3179, "license_type": "permissive", "max_line_length": 92, "num_lines": 92, "path": "/lambdata_sboov/helper.py", "repo_name": "shainaboover/Lambdata_sboov", "src_encoding": "UTF-8", "text": "'''\nThis module contains a date_splitter function\nand MySplitter class which performs a train_val_test split\non a pandas DataFrame\n'''\n\nimport pandas as pd\nfrom IPython.display import display\nfrom sklearn.datasets import load_wine\nfrom sklearn.model_selection import train_test_split\n\n\n# Function to split dates (\"MM/DD/YYYY\", etc.) 
into multiple columns\ndef date_splitter(self, date_column_name):\n '''\n This function takes column with dates, coverts to pandas datetime\n format creates separate columns for day, month, and year\n '''\n\n self[date_column_name] = pd.to_datetime(\n self[date_column_name],\n infer_datetime_format=True)\n self['Year'] = self[date_column_name].dt.year\n self['Month'] = self[date_column_name].dt.month\n self['Day'] = self[date_column_name].dt.day\n self.drop(date_column_name, axis=1, inplace=True)\n return self\n\n\n# Train/validate/test split function for a dataframe\nclass MySplitter():\n '''\n This class performs train_val_test split on pandas dataframe\n and prints out summary statistics\n '''\n def __init__(self, dataframe):\n self.dataframe = df\n\n\n def train_val_test_split(self, features, target,\n train_size=0.7, val_size=0.1,\n test_size=0.2, random_state=None,\n shuffle=True):\n\n '''\n This function performs 3 way split using sklearn train_test_split\n '''\n\n X = df[features]\n y = df[target]\n\n X_train_val, X_test, y_train_val, y_test = train_test_split(\n X, y, test_size=test_size, random_state=random_state, shuffle=shuffle)\n\n X_train, X_val, y_train, y_val = train_test_split(\n X_train_val, y_train_val, test_size=val_size / (train_size + val_size),\n random_state=random_state, shuffle=shuffle)\n\n return X_train, X_val, X_test, y_train, y_val, y_test\n\n\n def print_split_summary(self, X_train, X_val, X_test):\n '''\n This function prints summary statistics for X_train, X_val, and X_test.\n '''\n\n print('######################## TRAINING DATA ########################')\n print(f'X_train Shape: {X_train.shape}')\n display(X_train.describe(include='all').transpose())\n print('')\n\n print('######################## VALIDATION DATA ######################')\n print(f'X_val Shape: {X_val.shape}')\n display(X_val.describe(include='all').transpose())\n print('')\n\n print('######################## TEST DATA ############################')\n print(f'X_test Shape: {X_test.shape}')\n display(X_test.describe(include='all').transpose())\n print('')\n\n\nif __name__ == '__main__':\n raw_data = load_wine()\n df = pd.DataFrame(data=raw_data['data'], columns=raw_data['feature_names'])\n df['target'] = raw_data['target']\n\n\n # Test the MySplitter Class\n #splitter = MySplitter(df=df, features=['ash', 'hue'], target='target')\n #X_train, X_val, X_test, y_train, y_val, y_test = splitter.train_validation_test_split()\n #splitter.print_split_summary(X_train, X_val, X_test)\n" } ]
4
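The MySplitter class in the helper.py record above builds a 70/10/20 train/validation/test split from two calls to scikit-learn's train_test_split, rescaling the validation fraction to val_size / (train_size + val_size) on the second call so it stays a share of the original total. The standalone sketch below checks that arithmetic on a dummy array; the sizes and random_state are arbitrary example values, not the package's defaults.

# Minimal check of the two-stage split used in helper.py, on dummy data.
import numpy as np
from sklearn.model_selection import train_test_split

X = np.arange(1000).reshape(-1, 1)  # 1000 dummy samples
train_size, val_size, test_size = 0.7, 0.1, 0.2

# Stage 1: carve off the test set (20% of the total).
X_train_val, X_test = train_test_split(X, test_size=test_size, random_state=0)

# Stage 2: split the remainder; rescale so validation is 10% of the original total.
X_train, X_val = train_test_split(
    X_train_val, test_size=val_size / (train_size + val_size), random_state=0)

print(len(X_train), len(X_val), len(X_test))  # 700 100 200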
StarboyDH/Tic-Tac-Toe
https://github.com/StarboyDH/Tic-Tac-Toe
8230639fd6d3bcaae7e45608a2f477efcf630cd6
366ea6f8424ec87cf648cd3a2d907f7c5ee0c3f3
9165d40baab96dc462742473d3ee004bde15c5b7
refs/heads/main
2023-08-10T19:02:43.452145
2021-09-19T17:51:05
2021-09-19T17:51:05
408,194,359
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.2753623127937317, "alphanum_fraction": 0.31609871983528137, "avg_line_length": 23.08490562438965, "blob_id": "ad29c0b2d3785caaa4c37741bc6d8f11f8285da0", "content_id": "c6b4022ddb6c235bc7dc9a384e8f2f0c8c96beda", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5121, "license_type": "no_license", "max_line_length": 196, "num_lines": 212, "path": "/tic_tac_toe.py", "repo_name": "StarboyDH/Tic-Tac-Toe", "src_encoding": "UTF-8", "text": "import random\nimport time\n\ng = [\n [\"1\", \"2\", \"3\"],\n [\"4\", \"5\", \"6\"],\n [\"7\", \"8\", \"9\"]\n]\n\n\ndef block(b):\n index = {\n 1: [0, 0],\n 2: [0, 1],\n 3: [0, 2],\n 4: [1, 0],\n 5: [1, 1],\n 6: [1, 2],\n 7: [2, 0],\n 8: [2, 1],\n 9: [2, 2]\n }\n\n if g[index[b][0]][index[b][1]] != \"O\" and g[index[b][0]][index[b][1]]!= \"X\":\n g[index[b][0]][index[b][1]] = \"X\"\n return True\n else:\n return False\n \ndef bot():\n take_place = False\n #Easy Mode\n while take_place != True:\n a = random.randint(0,2)\n b = random.randint(0,2)\n\n if g[a][b] != \"X\" and g[a][b] != \"O\":\n g[a][b] = \"O\"\n take_place = True\n else:\n take_place = False \n\ndef third(ct):\n for num in \"123456789\":\n if num in ct:\n match num:\n case \"1\":\n g[0][0] = \"O\"\n case \"2\":\n g[0][1] = \"O\"\n case \"3\":\n g[0][2] = \"O\"\n case \"4\":\n g[1][0] = \"O\"\n case \"5\":\n g[1][1] = \"O\"\n case \"6\":\n g[1][2] = \"O\"\n case \"7\":\n g[2][0] = \"O\"\n case \"8\":\n g[2][1] = \"O\"\n case \"9\":\n g[2][2] = \"O\"\n\ndef bot_hard():\n #Hard Mode\n Xs = [\n g[0][:],\n g[1][:],\n g[2][:],\n [g[0][0], g[1][0], g[2][0]],\n [g[0][1], g[1][1], g[2][1]],\n [g[0][2], g[1][2], g[2][2]],\n [g[0][0], g[1][1], g[2][2]],\n [g[0][2], g[1][1], g[2][0]]\n ]\n\n isPlayed = False\n for ct in Xs:\n if ct.count(\"O\") == 2 and ct.count(\"X\") == 0:\n third(ct)\n isPlayed = True\n break\n elif ct.count(\"X\") == 2 and ct.count(\"O\") == 0:\n third(ct)\n isPlayed = True\n break\n else:\n isPlayed = False\n\n if isPlayed == False: \n if g[1][1] != \"X\" and g[1][1] != \"O\":\n g[1][1] = \"O\"\n else:\n c = []\n for j in range(0,3,2):\n for jj in range(0,3,2):\n if g[j][jj] != \"X\" and g[j][jj] != \"O\":\n c.append(g[j][jj])\n else:\n pass \n\n if len(c) > 0:\n match random.choice(c):\n case \"1\":\n g[0][0] = \"O\"\n case \"3\":\n g[0][2] = \"O\"\n case \"7\":\n g[2][0] = \"O\"\n case \"9\":\n g[2][2] = \"O\"\n else:\n bot()\n\ndef draw():\n w = [\n [' ', ' ', ' '],\n [' ', ' ', ' '],\n [' ', ' ', ' ']\n ]\n\n for d in range(0,3):\n for dd in range(0,3):\n if g[d][dd] == \"X\" or g[d][dd] == \"O\":\n w[d][dd] = g[d][dd]\n\n print(f\" {w[0][0]} | {w[0][1]} | {w[0][2]} \\n-----------------\\n {w[1][0]} | {w[1][1]} | {w[1][2]} \\n-----------------\\n {w[2][0]} | {w[2][1]} | {w[2][2]} \\n\") \n\n\nrun = False\nmode = int(input(\"Choose the mode of game (1) for easy mode and (2) for hard mode: \"))\nif 0 >= mode or mode >= 3:\n print(\"\\nYou should enter (1) or (2):\\n1 => Easy Mode\\n2 => Hard Mode\\n\\n**Please run the code again!**\")\n run = False\nelse:\n print(\"\\nSTART!!\\n\")\n run = True\n\n\nwin = \"\"\nrounds = 0\nwhile win == \"\":\n\n if run == False:\n break\n else:\n pass\n\n print(\"Your turn!\")\n x = int(input(\"Choose your block: \"))\n if block(x) == False:\n print(\"**You choosed a taken block**\")\n continue\n else:\n block(x)\n draw()\n\n time.sleep(0.5)\n print(\"Bot\\'s turn!\")\n match mode:\n case 1:\n bot()\n case 2:\n bot_hard()\n draw()\n\n win_x = \"\"\n win_y = \"\"\n win_z1 
= \"\"\n win_z2 = \"\"\n for i in range(0,3):\n win_z1 += g[i][i]\n for j in range(0,3):\n win_x += g[i][j]\n win_y += g[j][i]\n win_z2 += g[i][2-i]\n\n if win_x[-3:] == \"XXX\" or win_x[-3:] == \"OOO\":\n win = win_x[-1]\n break\n elif win_y[-3:] == \"XXX\" or win_y[-3:] == \"OOO\":\n win = win_y[-1]\n break\n elif win_z1[-3:] == \"XXX\" or win_z1[-3:] == \"OOO\":\n win = win_z1[-1]\n break\n elif win_z2[:] == \"X\"*9 or win_z2[:] == \"O\"*9:\n win = win_z2[-1]\n break\n \n rounds += 2\n if rounds >= 8:\n match win:\n case \"X\":\n print(\"Yay! You Won 🥳\")\n break\n case \"O\":\n print(\"Opps! You Lost 😓\")\n break\n case _:\n print(\"Draw! 🤔\")\n break\n else:\n match win:\n case \"X\":\n print(\"Yay! You Won 🥳\")\n break\n case \"O\":\n print(\"Opps! You Lost 😓\")\n break\n" }, { "alpha_fraction": 0.40751445293426514, "alphanum_fraction": 0.4393063485622406, "avg_line_length": 25.384614944458008, "blob_id": "f05cd7f0ff39cdad63c03bd44686f4fc071a24b3", "content_id": "9cef83b1fffd8f81c6c1cbaa83a2b307e0565a39", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 349, "license_type": "no_license", "max_line_length": 84, "num_lines": 13, "path": "/README.md", "repo_name": "StarboyDH/Tic-Tac-Toe", "src_encoding": "UTF-8", "text": "# Tic-Tac-Toe\n###Tic-Tac-Toe game with Python (my first try 🤩)\n\nYou will play as **X** and the machine plays as **O**.\n\nEnter a number from 1 to 9 to choose the field you want, according to the following:\n\n 1 | 2 | 3 \n ---------------\n 4 | 5 | 6 \n ---------------\n 7 | 8 | 9 \n ---------------\n \n" } ]
2
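tic_tac_toe.py above decides the winner by concatenating each row, column and diagonal of the 3x3 grid and looking for "XXX" or "OOO" (the match/case syntax it uses requires Python 3.10 or newer). The sketch below shows the same three-in-a-row test written over an explicit list of the eight winning lines; the sample board is invented for the example and this is not the repository's code.

# Illustrative three-in-a-row check over the eight winning lines of a 3x3 board.
def winner(board):
    """Return 'X' or 'O' if either player owns a full line, else None."""
    lines = (
        [[board[r][c] for c in range(3)] for r in range(3)] +  # rows
        [[board[r][c] for r in range(3)] for c in range(3)] +  # columns
        [[board[i][i] for i in range(3)],                      # main diagonal
         [board[i][2 - i] for i in range(3)]]                  # anti-diagonal
    )
    for line in lines:
        if line[0] in ("X", "O") and line.count(line[0]) == 3:
            return line[0]
    return None

board = [["X", "O", "3"],
         ["O", "X", "6"],
         ["7", "O", "X"]]
print(winner(board))  # X (main diagonal)

Walking all eight lines the same way keeps the diagonal cases from needing their own special-purpose comparison.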
AnthonyArmour/HackSprint
https://github.com/AnthonyArmour/HackSprint
447fc98e56fc7c18018438b594a2937f4a3184fe
da4b7968ff7afbb6c6742a8a28610a139e63443d
d642f59a7ecfdf7d5a2e7f690fd9be72107b0e42
refs/heads/master
2023-06-19T16:50:11.646330
2021-07-02T22:58:19
2021-07-02T22:58:19
381,819,456
0
0
null
2021-06-30T20:02:19
2021-07-02T19:35:49
2021-07-02T20:06:48
Python
[ { "alpha_fraction": 0.7735849022865295, "alphanum_fraction": 0.7735849022865295, "avg_line_length": 53, "blob_id": "dc09bf847bdec8468cd71e7532f56b38902aa25c", "content_id": "e4e49823c11f54096c8dcd1b9eccea08381bf30a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 53, "license_type": "no_license", "max_line_length": 53, "num_lines": 1, "path": "/assets/images/README.md", "repo_name": "AnthonyArmour/HackSprint", "src_encoding": "UTF-8", "text": "# Folder to hold all images for \"Lost Holbies\" pygame" }, { "alpha_fraction": 0.8205128312110901, "alphanum_fraction": 0.8205128312110901, "avg_line_length": 39, "blob_id": "0b590095892625936c722fa61add2ea49940ead0", "content_id": "54aa7a8849c32cf3bd51446a078aa2b6cd178689", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 39, "license_type": "no_license", "max_line_length": 39, "num_lines": 1, "path": "/assets/README.md", "repo_name": "AnthonyArmour/HackSprint", "src_encoding": "UTF-8", "text": "# Assets folder holds sounds and images" }, { "alpha_fraction": 0.6026490330696106, "alphanum_fraction": 0.6026490330696106, "avg_line_length": 24.33333396911621, "blob_id": "5825f64fd9ddc6be0c96dbb53aebc4fc5c8f2d6c", "content_id": "efb817a22e03eaa0e892820ea0c516262b4f9784", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 151, "license_type": "no_license", "max_line_length": 81, "num_lines": 6, "path": "/models/Logit.py", "repo_name": "AnthonyArmour/HackSprint", "src_encoding": "UTF-8", "text": "import os\n\ndef logit(st):\n f = open('/Users/anthonyarmour/VS_Code_Folders/HackSprint/log_file.txt', \"a\")\n f.write(\"\\n\" + st + \"\\n\")\n f.close()" }, { "alpha_fraction": 0.5833956599235535, "alphanum_fraction": 0.6148092746734619, "avg_line_length": 34.18421173095703, "blob_id": "588f486ea0e96b2781551900cc2e30dc0c20f9b6", "content_id": "f52668bdd76dc09c404ced1854e5e430bf666c5d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1337, "license_type": "no_license", "max_line_length": 81, "num_lines": 38, "path": "/models/objects.py", "repo_name": "AnthonyArmour/HackSprint", "src_encoding": "UTF-8", "text": "import pygame\n\"\"\"person_obj class which defines persons and inherits from base_obj\"\"\"\nDIM = (300, 300)\n\n\nclass person_obj():\n \"\"\"person_obj class\"\"\"\n\n def __init__(self, images, pos, name):\n self.name = name\n self.image_1 = pygame.transform.scale(pygame.image.load(images[0]), DIM)\n self.image_2 = pygame.transform.scale(pygame.image.load(images[1]), DIM)\n self.image_3 = pygame.transform.scale(pygame.image.load(images[2]), DIM)\n self.active = self.image_1\n self.pos = pygame.Rect(pos[0], pos[1], DIM[0], DIM[1])\n\n\nclass tech_obj():\n \"\"\"Tech obj class\"\"\"\n\n def __init__(self, images, pos, name):\n self.name = name\n self.image_1 = pygame.transform.scale(pygame.image.load(images[0]), DIM)\n self.image_2 = pygame.transform.scale(pygame.image.load(images[1]), DIM)\n self.image_3 = pygame.transform.scale(pygame.image.load(images[2]), DIM)\n self.active = self.image_1\n self.pos = pygame.Rect(pos[0], pos[1], DIM[0], DIM[1])\n\n\nclass question():\n \"\"\"question class\"\"\"\n\n def __init__(self, image, pos, obj_name):\n self.image = pygame.transform.scale(pygame.image.load(image), (550, 250))\n self.pos = pygame.Rect(pos[0], pos[1], 550, 250)\n self.name_id = obj_name\n self.active = False\n 
self.correct = False\n" }, { "alpha_fraction": 0.5908849239349365, "alphanum_fraction": 0.6138719320297241, "avg_line_length": 31.16666603088379, "blob_id": "ff598ad07faa09baa53d7f4ccd07242110fad0b1", "content_id": "e3ee5fe75918641c626350afe0e3d379bdc2bfd4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7526, "license_type": "no_license", "max_line_length": 115, "num_lines": 234, "path": "/main.py", "repo_name": "AnthonyArmour/HackSprint", "src_encoding": "UTF-8", "text": "import pygame\nimport random\nfrom models import persons_list, tech_obj_list, questions_list\nimport os\n\"\"\"Lost Little Holbie pygame script\"\"\"\npygame.init()\n\n# Window variables\nWIDTHB, HEIGHTB = 1200, 800\nWIN = pygame.display.set_mode((WIDTHB, HEIGHTB))\npygame.display.set_caption('Lost lil\\' holbie')\nFPS = 60\n\n# Backgrounds\nBACKGROUND_IMG = pygame.image.load(os.path.join('Assets/images/gameboard', 'gameboard.PNG'))\nSTART_1 = pygame.image.load(os.path.join('Assets/images/gameboard', 'welcome_1.PNG'))\nSTART_2 = pygame.image.load(os.path.join('Assets/images/gameboard', 'welcome_2.PNG'))\nSTART_3 = pygame.image.load(os.path.join('Assets/images/gameboard', 'welcome_3.PNG'))\nINSTRUCTION_IMG = pygame.image.load(os.path.join('Assets/images/gameboard', 'instruction_img.png'))\nGAME_OVER = pygame.image.load(os.path.join('Assets/images/gameboard', 'closescreen.PNG'))\n\n# Fonts/text\nFONT = pygame.font.SysFont('american typewriter', 48)\nSCOREFONT = pygame.font.SysFont('american typewriter', 120)\ntext = FONT.render('Click anywhere to continue', True, (0,0,0))\ntextRect = text.get_rect()\ntextRect.center = (600, 750)\n\n# Sounds\nDING = pygame.mixer.Sound(os.path.join(\"Assets/sounds\", \"ding.mp3\"))\nDING.set_volume(0.01)\nBONG = pygame.mixer.Sound(os.path.join(\"Assets/sounds\", \"bong.mp3\"))\nBONG.set_volume(0.01)\n\n# Misc\nSTART_BUTTON = pygame.Rect(475, 600, 300, 100)\nTRY_AGAIN = pygame.Rect(235, 625, 250, 75)\nQUIT = pygame.Rect(725, 625, 250, 75)\nCORRECT_IMG = pygame.transform.scale(\n pygame.image.load(os.path.join('Assets/images/textbox_objs', 'text_correct.PNG')),\n (250, 125))\nquestions_list[0].active = True\n\n\n# Draws the game window\n# screen: determine background\n# obj_list: objects to draw\ndef draw_window(screen, obj_list, total_correct):\n \"\"\"Draws game window\"\"\"\n if screen == \"start\":\n WIN.blit(START_1, (0, 0))\n elif screen == \"start_2\":\n WIN.blit(START_2, (0, 0))\n elif screen == \"start_3\":\n WIN.blit(START_3, (0, 0))\n elif screen == \"instruction\":\n WIN.blit(INSTRUCTION_IMG, (0, 0))\n elif screen == \"background\":\n question = get_active_question()\n WIN.blit(BACKGROUND_IMG, (0, 0))\n if question.correct:\n WIN.blit(CORRECT_IMG, (775, 150))\n WIN.blit(text, textRect)\n WIN.blit(question.image, (75, 75))\n WIN.blit(obj_list[0].active, (75, 400))\n WIN.blit(obj_list[1].active, (450, 400))\n WIN.blit(obj_list[2].active, (825, 400))\n elif screen == \"game_over\":\n WIN.blit(GAME_OVER, (0, 0))\n score = SCOREFONT.render('{} / 10'.format(total_correct), True, (255,255,255))\n scoreRect = score.get_rect()\n scoreRect.center = (600, 400)\n WIN.blit(score, scoreRect)\n pygame.display.update()\n\n\n# Determines currently active question\ndef get_active_question():\n \"\"\"Gets active question\"\"\"\n for question in questions_list:\n if question.active == True:\n return question\n\n\n# Determines which objects to display\ndef blits():\n \"\"\"Finds obj's to blit\"\"\"\n new_list = []\n active = 
get_active_question()\n temp_person = persons_list.copy()\n temp_tech = tech_obj_list.copy()\n\n for i, person in enumerate(temp_person):\n if person.name == active.name_id:\n new_list.append(person)\n temp_person.pop(i)\n rand = random.sample(temp_person, 2)\n [new_list.append(i) for i in rand]\n return random.sample(new_list, 3)\n\n for i, tech in enumerate(temp_tech):\n if tech.name == active.name_id:\n new_list.append(tech)\n temp_tech.pop(i)\n rand = random.sample(temp_tech, 2)\n [new_list.append(i) for i in rand]\n return random.sample(new_list, 3)\n\n\n# Sets a new active question\n# Return: True if new active question set, otherwise False\ndef set_active():\n \"\"\"Sets active question\"\"\"\n for i in range(len(questions_list)):\n if questions_list[i].active == True:\n questions_list[i].active = False\n if i + 1 < len(questions_list):\n questions_list[i + 1].active = True\n return True\n \n return False\n\n\n# Sets positions of displayed objects\n# obj_list: list of objects to display\ndef set_pos(obj_list):\n \"\"\"Sets position of rects\"\"\"\n start = 75\n for obj in obj_list:\n obj.pos.x = start\n obj.pos.y = 400\n start += 375\n\n\n# Resets background colors of objects\n# obj_list: list of objects to reset\ndef reset_colors(obj_list):\n for obj in obj_list:\n obj.active = obj.image_1\n\n\n# Reacts to clicks, based on currently displayed prompts/questions\n# event: click\n# screen: background\n# obj_list: list of active objects\ndef clicked(event, screen, obj_list, tries, total_correct):\n \"\"\"Determines whats clicked and decides actions\"\"\"\n if screen == \"start\":\n if START_BUTTON.collidepoint(event.pos):\n return \"start_2\", None, tries, total_correct\n elif screen == \"start_2\":\n return \"start_3\", None, tries, total_correct\n elif screen == \"start_3\":\n return \"instruction\", None, tries, total_correct\n elif screen == \"instruction\":\n obj_list = blits()\n reset_colors(obj_list)\n set_pos(obj_list)\n return \"background\", obj_list, tries, total_correct\n elif screen == \"background\":\n if get_active_question().correct == True:\n if tries == 1:\n total_correct += 1\n if set_active() == False:\n return \"game_over\", obj_list, tries, total_correct\n obj_list = blits()\n set_pos(obj_list)\n reset_colors(obj_list)\n return \"background\", obj_list, 0, total_correct\n else:\n tries += 1\n click_handler(event, get_active_question(), obj_list)\n elif screen == \"game_over\":\n if TRY_AGAIN.collidepoint(event.pos):\n questions_list[0].active = True\n reset_correct()\n return \"start\", None, 0, 0\n if QUIT.collidepoint(event.pos):\n pygame.quit()\n\n return screen, obj_list, tries, total_correct\n\n\n# Resets all question correct attributes to False\ndef reset_correct():\n \"\"\"reset correct attr for questions list\"\"\"\n for x in range(len(questions_list)):\n questions_list[x].correct = False\n\n\n# Handles input - determines if correct or incorrect\n# event: click\n# question: question object\n# obj_list: list of active objects\ndef click_handler(event, question, obj_list):\n \"\"\"Determines if correct option clicked or not\"\"\"\n for obj in obj_list:\n if obj.pos.collidepoint(event.pos):\n if question.name_id == obj.name:\n obj.active = obj.image_3\n question.correct = True\n DING.play()\n else:\n obj.active = obj.image_2\n BONG.play()\n\n\n\n# Game loop\ndef main():\n \"\"\"Contains game loop\"\"\"\n run = True\n clock = pygame.time.Clock()\n screen = \"start\"\n obj_list = None\n tries = 0\n total_correct = 0\n\n while run:\n clock.tick(FPS)\n for event 
in pygame.event.get():\n if event.type == pygame.QUIT:\n run = False\n elif event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n screen, obj_list, tries, total_correct = clicked(event, screen, obj_list, tries, total_correct)\n\n draw_window(screen, obj_list, total_correct)\n\n pygame.quit()\n\n\nif __name__ == '__main__':\n main()" }, { "alpha_fraction": 0.7735849022865295, "alphanum_fraction": 0.7735849022865295, "avg_line_length": 53, "blob_id": "e7766e5b29d6a1d13b5835a52733010e190adf41", "content_id": "aca7be68ead4de7a21db8c3fabb6b653d10e9dc2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 53, "license_type": "no_license", "max_line_length": 53, "num_lines": 1, "path": "/assets/sounds/README.md", "repo_name": "AnthonyArmour/HackSprint", "src_encoding": "UTF-8", "text": "# Folder to hold all sounds for \"Lost Holbies\" pygame" }, { "alpha_fraction": 0.6998713612556458, "alphanum_fraction": 0.73093181848526, "avg_line_length": 51.31730651855469, "blob_id": "b43b56658ef445ff58e6dc7b523e2d836b8a1cfa", "content_id": "0651c3d95690c491afc3e35f149a11bb6f02902b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5441, "license_type": "no_license", "max_line_length": 83, "num_lines": 104, "path": "/models/__init__.py", "repo_name": "AnthonyArmour/HackSprint", "src_encoding": "UTF-8", "text": "from models.objects import tech_obj, person_obj, question\nimport os\nimport random\n\n# person_obj images\nderek_images = []\nderek_images.append(os.path.join(\"Assets/images/person_objs\", \"derek_1.PNG\"))\nderek_images.append(os.path.join(\"Assets/images/person_objs\", \"derek_2.PNG\"))\nderek_images.append(os.path.join(\"Assets/images/person_objs\", \"derek_3.PNG\"))\n\ntori_images = []\ntori_images.append(os.path.join(\"Assets/images/person_objs\", \"tori_1.PNG\"))\ntori_images.append(os.path.join(\"Assets/images/person_objs\", \"tori_2.PNG\"))\ntori_images.append(os.path.join(\"Assets/images/person_objs\", \"tori_3.PNG\"))\n\nlibby_images = []\nlibby_images.append(os.path.join(\"Assets/images/person_objs\", \"libby_1.PNG\"))\nlibby_images.append(os.path.join(\"Assets/images/person_objs\", \"libby_2.PNG\"))\nlibby_images.append(os.path.join(\"Assets/images/person_objs\", \"libby_3.PNG\"))\n\nstan_images = []\nstan_images.append(os.path.join(\"Assets/images/person_objs\", \"stan_1.PNG\"))\nstan_images.append(os.path.join(\"Assets/images/person_objs\", \"stan_2.PNG\"))\nstan_images.append(os.path.join(\"Assets/images/person_objs\", \"stan_3.PNG\"))\n\nstutor_images = []\nstutor_images.append(os.path.join(\"Assets/images/person_objs\", \"stutor_1.PNG\"))\nstutor_images.append(os.path.join(\"Assets/images/person_objs\", \"stutor_2.PNG\"))\nstutor_images.append(os.path.join(\"Assets/images/person_objs\", \"stutor_3.PNG\"))\n\njulien_images = []\njulien_images.append(os.path.join(\"Assets/images/person_objs\", \"julien_1.PNG\"))\njulien_images.append(os.path.join(\"Assets/images/person_objs\", \"julien_2.PNG\"))\njulien_images.append(os.path.join(\"Assets/images/person_objs\", \"julien_3.PNG\"))\n\n# tech_obj images\ngoogle_images = []\ngoogle_images.append(os.path.join(\"Assets/images/tech_objs\", \"google_1.PNG\"))\ngoogle_images.append(os.path.join(\"Assets/images/tech_objs\", \"google_2.PNG\"))\ngoogle_images.append(os.path.join(\"Assets/images/tech_objs\", \"google_3.PNG\"))\n\nintranet_images = []\nintranet_images.append(os.path.join(\"Assets/images/tech_objs\", 
\"intranet_1.PNG\"))\nintranet_images.append(os.path.join(\"Assets/images/tech_objs\", \"intranet_2.PNG\"))\nintranet_images.append(os.path.join(\"Assets/images/tech_objs\", \"intranet_3.PNG\"))\n\nvs_code_images = []\nvs_code_images.append(os.path.join(\"Assets/images/tech_objs\", \"vscode_1.PNG\"))\nvs_code_images.append(os.path.join(\"Assets/images/tech_objs\", \"vscode_2.PNG\"))\nvs_code_images.append(os.path.join(\"Assets/images/tech_objs\", \"vscode_3.PNG\"))\n\nvbox_images = []\nvbox_images.append(os.path.join(\"Assets/images/tech_objs\", \"virtualbox_1.PNG\"))\nvbox_images.append(os.path.join(\"Assets/images/tech_objs\", \"virtualbox_2.PNG\"))\nvbox_images.append(os.path.join(\"Assets/images/tech_objs\", \"virtualbox_3.PNG\"))\n\ngithub_images = []\ngithub_images.append(os.path.join(\"Assets/images/tech_objs\", \"github_1.PNG\"))\ngithub_images.append(os.path.join(\"Assets/images/tech_objs\", \"github_2.PNG\"))\ngithub_images.append(os.path.join(\"Assets/images/tech_objs\", \"github_3.PNG\"))\n\n# question images\nderekQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_derek.PNG\")\ntoriQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_tori.PNG\")\ngoogleQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_google.PNG\")\nintranetQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_intranet.PNG\")\nstanQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_stan.PNG\")\njulienQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_julien.PNG\")\ngithubQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_github.PNG\")\nstutorQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_stutor.PNG\")\nvirtualboxQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_virtualbox.PNG\")\nvscodeQ_img = os.path.join(\"Assets/images/textbox_objs\", \"text_vscode.PNG\")\n\n#init persons(self, image, pos, name):\npersons_list = []\npersons_list.append(person_obj(derek_images, (200, 600), \"Derek\"))\npersons_list.append(person_obj(tori_images, (600, 600), \"Tori\"))\npersons_list.append(person_obj(libby_images, (900, 600), \"Libby\"))\npersons_list.append(person_obj(stan_images, (600, 600), \"Stan\"))\npersons_list.append(person_obj(stutor_images, (900, 600), \"stutor\"))\npersons_list.append(person_obj(julien_images, (900, 600), \"Julien\"))\n\n#init tech_objs (self, image, pos, name):\ntech_obj_list = []\ntech_obj_list.append(tech_obj(google_images, (200, 600), \"Google\"))\ntech_obj_list.append(tech_obj(intranet_images, (600, 600), \"Intranet\"))\ntech_obj_list.append(tech_obj(vs_code_images, (900, 600), \"VS Code\"))\ntech_obj_list.append(tech_obj(vbox_images, (600, 600), \"vbox\"))\ntech_obj_list.append(tech_obj(github_images, (900, 600), \"Github\"))\n\n# init questions\nquestions_list = []\nquestions_list.append(question(derekQ_img, (150, 100), persons_list[0].name))\nquestions_list.append(question(toriQ_img, (150, 100), persons_list[1].name))\nquestions_list.append(question(stanQ_img, (150, 100), persons_list[3].name))\nquestions_list.append(question(stutorQ_img, (150, 100), persons_list[4].name))\nquestions_list.append(question(julienQ_img, (150, 100), persons_list[5].name))\nquestions_list.append(question(googleQ_img, (150, 100), tech_obj_list[0].name))\nquestions_list.append(question(intranetQ_img, (150, 100), tech_obj_list[1].name))\nquestions_list.append(question(githubQ_img, (150, 100), tech_obj_list[4].name))\nquestions_list.append(question(virtualboxQ_img, (150, 100), 
tech_obj_list[3].name))\nquestions_list.append(question(vscodeQ_img, (150, 100), tech_obj_list[2].name))\n\nrandom.shuffle(questions_list)\n" } ]
7
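In the HackSprint record above, blits() assembles each round by taking the object whose name matches the active question and padding it with two randomly chosen distractors before shuffling the trio. The standalone sketch below reproduces that selection idea with plain strings; the option names are placeholders for the game's person and tech objects, and this is only an illustration, not the game's code.

# Illustrative "one correct answer plus two random distractors" picker.
import random

def build_round(correct, pool, distractors=2):
    """Return the correct option plus `distractors` others, in random order."""
    others = random.sample([item for item in pool if item != correct], distractors)
    options = [correct] + others
    random.shuffle(options)
    return options

pool = ["Derek", "Tori", "Libby", "Stan", "Julien", "Google", "Github"]
print(build_round("Tori", pool))  # e.g. ['Stan', 'Tori', 'Google']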
punchcutter/ufoProcessor
https://github.com/punchcutter/ufoProcessor
7dea3eaf69f9cfbbbf4f53288c7975315884b7ff
88323a406152636f60f78e46cd41106445915877
2e522ec49b89734d2cac1fb1842d6bd6172752e3
refs/heads/master
2022-04-09T04:22:29.415761
2020-03-24T15:43:19
2020-03-24T15:43:19
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7624645233154297, "alphanum_fraction": 0.7677341103553772, "avg_line_length": 41.90434646606445, "blob_id": "eb7bf9200c5534671ad540047f0853a32d090521", "content_id": "ce0d9fec4e72a080ab34e5f0226bd64c92ebc9df", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4934, "license_type": "permissive", "max_line_length": 423, "num_lines": 115, "path": "/README.md", "repo_name": "punchcutter/ufoProcessor", "src_encoding": "UTF-8", "text": "[![Travis](https://travis-ci.org/LettError/ufoProcessor.svg?branch=master)](https://travis-ci.org/LettError/ufoProcessor)\n[![PyPI](https://img.shields.io/pypi/v/ufoprocessor.svg)](https://pypi.org/project/ufoprocessor)\n\n# ufoProcessor\nPython package based on the **designSpaceDocument** from [fontTools.designspaceLib](https://github.com/fonttools/fonttools/tree/master/Lib/fontTools/designspaceLib)) specifically to _process_ and _generate_ instances for UFO files, glyphs and other data.\n\n* Collect source materials\n* Provide mutators for specific glyphs, font info, kerning so that other tools can generate partial instances. Either from `MutatorMath` or `fonttools varlib.model`.\n* Support designspace format 4 with layers.\n* Apply avar-like designspace bending\n* Apply rules\n* Generate actual UFO instances in formats 2 and 3.\n* Round geometry as requested\n* Try to stay up to date with fontTools\n* Baseclass for tools that need access to designspace data.\n\n## Usage\nThe easiest way to use ufoProcessor is to call `build(designspacePath)`\n\n* **documentPath**: path to the designspace file.\n* **outputUFOFormatVersion**: integer, 2, 3. Format for generated UFOs. Note: can be different from source UFO format.\n* **roundGeometry**: bool, if the geometry needs to be rounded to whole integers. This affects glyphs, metrics, kerning, select font info.\n* **processRules**: bool, when generating UFOs, execute designspace rules as swaps.\n* **logger**: optional logger object.\n\n* **documentPath**: filepath to the .designspace document\n* **outputUFOFormatVersion**: ufo format for output, default is the current, so 3.\n* **useVarlib**: True if you want the geometry to be generated with `varLib.model` instead of `mutatorMath`.\n\n## Examples\n\nGenerate all the instances (using the varlib model, no rounding):\n\n```python\nimport ufoProcessor\nmyPath = \"myDesignspace.designspace\"\nufoProcessor.build(myPath)\n```\n\nGenerate all the instances (using the varlib model, but round all the geometry to integers):\n\n```python\nimport ufoProcessor\nmyPath = \"myDesignspace.designspace\"\nufoProcessor.build(myPath, roundGeometry=True)\n```\n\nGenerate all the instances (using the mutatormath model, no rounding):\n\n```python\nimport ufoProcessor\nmyPath = \"myDesignspace.designspace\"\nufoProcessor.build(myPath, useVarlib=False)\n```\n\nGenerate an instance for one glyph, `\"A\"` at `width=100, weight=200`. (assuming the designspace has those axes and the masters have that glyph)\n\n```python\nimport ufoProcessor\nmyPath = \"myDesignspace.designspace\"\ndoc = ufoProcessor.DesignSpaceProcessor()\ndoc.read(myPath)\ndoc.loadFonts()\nglyphMutator = doc.getGlyphMutator(\"A\")\ninstance = glyphMutator.makeInstance(Location(width=100, weight=200)\n```\n\nDepending on the setting for `usevarlib`, the `glyphMutator` object returned by `getGlyphMutator` in the example above can either be a `MutatorMath.Mutator`, or a `VariationModelMutator` object. 
That uses the `fontTools.varLib.models.VariationModel` but it is wrapped and can be called as a Mutator object to generate instances. This way `DesignSpaceProcessor` does not need to know much about which math model it is using.\n\n\n## Convert Superpolator to designspace\n\nThe ufoProcessor.sp3 module has some tools for interpreting Superpolator .sp3 documents. Not all data is migrated. But the important geometry is there. Given that Superpolator can read designspace files, there is hopefully no real need for a writer. Note that this conversion is lossy. \n\n* Axis\n\t* dimensions\n\t* name\n\t* tag\n* Source\n\t* ufo path\n\t* familyname, stylename\n\t* mute state (stored in lib)\n\t* location\n* Instance\n\t* ufo path\n\t* familyname, stylename\n\t* stylemap names\n\t* location\n* Rules\n\t* *Simple Rules* are wrapped in a conditionset.\n\t* most of the really old Superpolator rules can't be converted. Only rules with `<` or `>` operators are used.\n* Some Superpolator user prefs\n\t* Preview text\n\t* Which axes used vertically and horizontally\n\n\n## Usage \n```python\n# convert sp3 file to designspace\n# first make a new designspace doc object\ndoc = DesignSpaceDocument()\n# feed it to the reader\nreader = SuperpolatorReader(sp3path, doc)\nreader.read()\n# now you can work with it, even save it\ndoc.write(designspacePath)\n```\nIndeed that last example comes from this convenience function: \n```sp3_to_designspace(sp3path, designspacePath=None)```\nIf designspacePath = None, sp3_to_designspace will use the same path for the output, but replace the `.sp3` with `.designspace` extension. If the file exists it will overwrite.\n\n## Notes\n* Glyph-specific masters in instances are ignored. \n* Instance notes are ignored. \n* Designspace geometry requires the default master to be on the default value of each axis. Superpolator handled that differently, it would find the default dynamically. So it is possible that converted designspaces need some work in terms of the basic structure. 
That can't be handled automatically.\n" }, { "alpha_fraction": 0.7781707644462585, "alphanum_fraction": 0.7827185392379761, "avg_line_length": 23.75, "blob_id": "44bacce394cf190d213cb8d69e8152f9e2d5deb3", "content_id": "e5e46db67768d7abf1ac51d978981837ce08fbc1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1979, "license_type": "permissive", "max_line_length": 77, "num_lines": 80, "path": "/Tests/20190830 benders/test.py", "repo_name": "punchcutter/ufoProcessor", "src_encoding": "UTF-8", "text": "\"\"\"\n\n\n\ttest with these 3 masters\n\ton 1 axis that has a map that maps to a different range\n\n\taxis values are in user coordinates\n\tdesignpsace problems should check with the proper mapped values\n\tmasters and instancees are in designspace coordinates\n\n\tgoals:\n\t* the designspace should validate\n\t* the generated intermediate should have touching shapes, just like master 2\n\t* determine if we can get rid of the bend=True/False flags\n\n Suppose the numbers in an axis map are messed up, it's then impossible\n to find the default.\n\"\"\"\n\nimport importlib\nimport ufoProcessor\nimportlib.reload(ufoProcessor)\n\n\nimport mutatorMath\nprint(mutatorMath.__file__)\nimport mutatorMath.objects.mutator\nimportlib.reload(mutatorMath.objects.mutator)\nfrom mutatorMath.objects.mutator import Location\nfrom designspaceProblems import DesignSpaceChecker\nimport collections\nfrom ufoProcessor import DesignSpaceProcessor\nfrom pprint import pprint\n\npath = \"Test.designspace\"\n\ndp = DesignSpaceProcessor()\ndp.read(path)\ndp.loadFonts()\n\ndsc = DesignSpaceChecker(dp)\ndsc.checkEverything()\npprint(dsc.problems)\nprint('hasStructuralProblems', dsc.hasStructuralProblems())\n\n\nprint(dp.newDefaultLocation())\nprint(dp.instances)\nprint('findDefault', dp.findDefault())\ndp.useVarlib = False\nprint('varlib', dp.useVarlib)\n\naxisMapper = ufoProcessor.varModels.AxisMapper(dp.axes)\nprint('axisMapper', axisMapper.getMappedAxisValues())\nr = axisMapper(Location(test=1))\n\ndefault = dp.getNeutralFont()\nprint('default.path', default.path)\ndp.generateUFO()\n\nglyphName = \"a\"\nprint('mutator for a', dp.getGlyphMutator(glyphName))\nprint('-'*40)\nprint('problems')\nfor p in dp.problems:\n\tprint(p)\nprint('-'*40)\nprint('toollog')\nfor line in dp.toolLog:\n\tprint(\"\\t\" + line)\n\n\ninstancePath = \"instances/BenderTest-Intermediate.ufo\"\ninstance = RFont(instancePath, showUI=False)\nprint(instance.info.capHeight)\nprint(instance.kerning.items())\n\nfrom mutatorMath.objects.mutator import Location\nl = Location(test=0)\nprint(l.isOrigin())" }, { "alpha_fraction": 0.5772232413291931, "alphanum_fraction": 0.5841651558876038, "avg_line_length": 43.617408752441406, "blob_id": "19a2d22ae7ae9b963feaa2727e9173b01d9788fb", "content_id": "46ca167ec9d94b3879a943c671fd4d16abb6ea29", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22040, "license_type": "permissive", "max_line_length": 359, "num_lines": 494, "path": "/Lib/ufoProcessor/sp3.py", "repo_name": "punchcutter/ufoProcessor", "src_encoding": "UTF-8", "text": "import os\nimport glob\n\nfrom fontTools.misc.loggingTools import LogMixin\nfrom fontTools.designspaceLib import DesignSpaceDocument, AxisDescriptor, SourceDescriptor, RuleDescriptor, InstanceDescriptor\n\ntry:\n import xml.etree.cElementTree as ET\nexcept ImportError:\n import xml.etree.ElementTree as ET\n\n# Reader that parses Superpolator documents and buidls 
designspace objects.\n# Note: the Superpolator document format precedes the designspace documnt format.\n# For now I just want to migrate data out of Superpolator into designspace.\n# So not all data will migrate, just the stuff we can use. \n\n\"\"\"\n\n\n <lib>\n <dict>\n <key>com.letterror.skateboard.interactionSources</key>\n <dict>\n <key>horizontal</key>\n <array/>\n <key>ignore</key>\n <array/>\n <key>vertical</key>\n <array/>\n </dict>\n <key>com.letterror.skateboard.mutedSources</key>\n <array>\n <array>\n <string>IBM Plex Sans Condensed-Bold.ufo</string>\n <string>foreground</string>\n </array>\n </array>\n <key>com.letterror.skateboard.previewLocation</key>\n <dict>\n <key>weight</key>\n <real>0.0</real>\n </dict>\n <key>com.letterror.skateboard.previewText</key>\n <string>SKATE</string>\n </dict>\n </lib>\n\n\n\n\"\"\"\n\nsuperpolatorDataLibKey = \"com.superpolator.data\" # lib key for Sp data in .designspace\nskateboardInteractionSourcesKey = \"com.letterror.skateboard.interactionSources\"\nskateboardMutedSourcesKey = \"com.letterror.skateboard.mutedSources\"\nskipExportKey = \"public.skipExportGlyphs\"\nskateboardPreviewLocationsKey = \"com.letterror.skateboard.previewLocation\"\nskateboardPreviewTextKey = \"com.letterror.skateboard.previewText\"\n\nclass SuperpolatorReader(LogMixin):\n ruleDescriptorClass = RuleDescriptor\n axisDescriptorClass = AxisDescriptor\n sourceDescriptorClass = SourceDescriptor\n instanceDescriptorClass = InstanceDescriptor\n\n def __init__(self, documentPath, documentObject, convertRules=True, convertData=True, anisotropic=False):\n self.path = documentPath\n self.documentObject = documentObject\n self.convertRules = convertRules\n self.convertData = convertData\n self.allowAnisotropic = anisotropic # maybe add conversion options later\n tree = ET.parse(self.path)\n self.root = tree.getroot()\n self.documentObject.formatVersion = self.root.attrib.get(\"format\", \"3.0\")\n self.axisDefaults = {}\n self._strictAxisNames = True\n\n\n @classmethod\n def fromstring(cls, string, documentObject):\n f = BytesIO(tobytes(string, encoding=\"utf-8\"))\n self = cls(f, documentObject)\n self.path = None\n return self\n\n def read(self):\n self.readAxes()\n if self.convertData:\n self.readData()\n if self.convertRules:\n self.readOldRules()\n self.readSimpleRules()\n self.readSources()\n self.readInstances()\n\n def readData(self):\n # read superpolator specific data, view prefs etc.\n # if possible convert it to skateboard\n interactionSources = {'horizontal': [], 'vertical': [], 'ignore': []}\n ignoreElements = self.root.findall(\".ignore\")\n ignoreGlyphs = []\n for ignoreElement in ignoreElements:\n names = ignoreElement.attrib.get('glyphs')\n if names:\n ignoreGlyphs = names.split(\",\")\n if ignoreGlyphs:\n self.documentObject.lib[skipExportKey] = ignoreGlyphs\n dataElements = self.root.findall(\".data\")\n if not dataElements:\n return\n newLib = {}\n interactionSourcesAdded = False\n for dataElement in dataElements:\n name = dataElement.attrib.get('name')\n value = dataElement.attrib.get('value')\n if value in ['True', 'False']:\n value = value == \"True\"\n else:\n try:\n value = float(value)\n except ValueError:\n pass\n if name == \"previewtext\":\n self.documentObject.lib[skateboardPreviewTextKey] = value\n elif name == \"horizontalPreviewAxis\":\n interactionSources['horizontal'].append(value)\n interactionSourcesAdded = True\n elif name == \"verticalPreviewAxis\":\n interactionSources['vertical'].append(value)\n interactionSourcesAdded = True\n \n 
newLib[name] = value\n if interactionSourcesAdded:\n self.documentObject.lib[skateboardInteractionSourcesKey] = interactionSources\n if newLib:\n self.documentObject.lib[superpolatorDataLibKey] = newLib\n\n\n def readOldRules(self):\n # read the old rules\n # <rule enabled=\"1\" logic=\"all\" resultfalse=\"B\" resulttrue=\"B.round\">\n # <condition axisname=\"AxisWidth\" operator=\"==\" xvalue=\"100.000000\"/>\n # </rule>\n\n # superpolator old rule to simple rule\n # if op in ['<', '<=']:\n # # old style data\n # axes[axisName]['maximum'] = conditionDict['values']\n # newRule.name = \"converted %s < and <= \"%(axisName)\n # elif op in ['>', '>=']:\n # # old style data\n # axes[axisName]['minimum'] = conditionDict['values']\n # newRule.name = \"converted %s > and >= \"%(axisName)\n # elif op == \"==\":\n # axes[axisName]['maximum'] = conditionDict['values']\n # axes[axisName]['minimum'] = conditionDict['values']\n # newRule.name = \"converted %s == \"%(axisName)\n # newRule.enabled = False\n # elif op == \"!=\":\n # axes[axisName]['maximum'] = conditionDict['values']\n # axes[axisName]['minimum'] = conditionDict['values']\n # newRule.name = \"unsupported %s != \"%(axisName)\n # newRule.enabled = False\n # else:\n # axes[axisName]['maximum'] = conditionDict['minimum']\n # axes[axisName]['minimum'] = conditionDict['maximum']\n # newRule.name = \"minmax legacy rule for %s\"%axisName\n # newRule.enabled = False\n\n rules = []\n for oldRuleElement in self.root.findall(\".rule\"):\n ruleObject = self.ruleDescriptorClass()\n # only one condition set in these old rules\n cds = []\n a = oldRuleElement.attrib['resultfalse']\n b = oldRuleElement.attrib['resulttrue']\n ruleObject.subs.append((a,b))\n for oldConditionElement in oldRuleElement.findall(\".condition\"):\n cd = {}\n operator = oldConditionElement.attrib['operator']\n axisValue = float(oldConditionElement.attrib['xvalue'])\n axisName = oldConditionElement.attrib['axisname']\n if operator in ['<', '<=']:\n cd['maximum'] = axisValue\n cd['minimum'] = None\n cd['name'] = axisName\n ruleObject.name = \"converted %s < and <= \"%(axisName)\n elif operator in ['>', '>=']:\n cd['maximum'] = None\n cd['minimum'] = axisValue\n cd['name'] = axisName\n ruleObject.name = \"converted %s > and >= \"%(axisName)\n elif operator in [\"==\", \"!=\"]:\n # can't convert this one\n continue\n cds.append(cd)\n if cds:\n ruleObject.conditionSets.append(cds)\n self.documentObject.addRule(ruleObject)\n\n def readSimpleRules(self):\n # read the simple rule elements\n # <simplerules>\n # <simplerule enabled=\"1\" name=\"width: &lt; 500.0\">\n # <sub name=\"I\" with=\"I.narrow\"/>\n # <condition axisname=\"width\" maximum=\"500\"/>\n # <condition axisname=\"grade\" minimum=\"0\" maximum=\"500\"/>\n # </simplerule>\n # </simplerules>\n\n\n rulesContainerElements = self.root.findall(\".simplerules\")\n rules = []\n for rulesContainerElement in rulesContainerElements:\n for ruleElement in rulesContainerElement:\n ruleObject = self.ruleDescriptorClass()\n ruleName = ruleObject.name = ruleElement.attrib['name']\n # subs\n for subElement in ruleElement.findall('.sub'):\n a = subElement.attrib['name']\n b = subElement.attrib['with']\n ruleObject.subs.append((a, b))\n # condition sets, .sp3 had none\n externalConditions = self._readConditionElements(\n ruleElement,\n ruleName,\n )\n if externalConditions:\n ruleObject.conditionSets.append(externalConditions)\n self.log.info(\n \"Found stray rule conditions outside a conditionset. 
\"\n \"Wrapped them in a new conditionset.\"\n )\n self.documentObject.addRule(ruleObject)\n\n def _readConditionElements(self, parentElement, ruleName=None):\n # modified from the method from fonttools.designspaceLib\n # it's not the same!\n cds = []\n for conditionElement in parentElement.findall('.condition'):\n cd = {}\n cdMin = conditionElement.attrib.get(\"minimum\")\n if cdMin is not None:\n cd['minimum'] = float(cdMin)\n else:\n # will allow these to be None, assume axis.minimum\n cd['minimum'] = None\n cdMax = conditionElement.attrib.get(\"maximum\")\n if cdMax is not None:\n cd['maximum'] = float(cdMax)\n else:\n # will allow these to be None, assume axis.maximum\n cd['maximum'] = None\n cd['name'] = conditionElement.attrib.get(\"axisname\")\n # # test for things\n if cd.get('minimum') is None and cd.get('maximum') is None:\n raise DesignSpaceDocumentError(\n \"condition missing required minimum or maximum in rule\" +\n (\" '%s'\" % ruleName if ruleName is not None else \"\"))\n cds.append(cd)\n return cds\n\n def readAxes(self):\n # read the axes elements, including the warp map.\n axisElements = self.root.findall(\".axis\")\n if not axisElements:\n # raise error, we need axes\n return\n for axisElement in axisElements:\n axisObject = self.axisDescriptorClass()\n axisObject.name = axisElement.attrib.get(\"name\")\n axisObject.tag = axisElement.attrib.get(\"shortname\")\n axisObject.minimum = float(axisElement.attrib.get(\"minimum\"))\n axisObject.maximum = float(axisElement.attrib.get(\"maximum\"))\n axisObject.default = float(axisElement.attrib.get(\"initialvalue\", axisObject.minimum))\n self.documentObject.axes.append(axisObject)\n self.axisDefaults[axisObject.name] = axisObject.default\n self.documentObject.defaultLoc = self.axisDefaults\n\n def colorFromElement(self, element):\n elementColor = None\n for colorElement in element.findall('.color'):\n elementColor = self.readColorElement(colorElement)\n\n def readColorElement(self, colorElement):\n pass\n\n def locationFromElement(self, element):\n elementLocation = None\n for locationElement in element.findall('.location'):\n elementLocation = self.readLocationElement(locationElement)\n break\n if not self.allowAnisotropic:\n # don't want any anisotropic values here\n split = {}\n for k, v in elementLocation.items():\n if type(v) == type(()):\n split[k] = v[0]\n else:\n split[k] = v\n elementLocation = split\n return elementLocation\n\n def readLocationElement(self, locationElement):\n \"\"\" Format 0 location reader \"\"\"\n if self._strictAxisNames and not self.documentObject.axes:\n raise DesignSpaceDocumentError(\"No axes defined\")\n loc = {}\n for dimensionElement in locationElement.findall(\".dimension\"):\n dimName = dimensionElement.attrib.get(\"name\")\n if self._strictAxisNames and dimName not in self.axisDefaults:\n # In case the document contains no axis definitions,\n self.log.warning(\"Location with undefined axis: \\\"%s\\\".\", dimName)\n continue\n xValue = yValue = None\n try:\n xValue = dimensionElement.attrib.get('xvalue')\n xValue = float(xValue)\n except ValueError:\n self.log.warning(\"KeyError in readLocation xValue %3.3f\", xValue)\n try:\n yValue = dimensionElement.attrib.get('yvalue')\n if yValue is not None:\n yValue = float(yValue)\n except ValueError:\n pass\n if yValue is not None:\n loc[dimName] = (xValue, yValue)\n else:\n loc[dimName] = xValue\n return loc\n\n def readSources(self):\n for sourceCount, sourceElement in enumerate(self.root.findall(\".master\")):\n filename = 
sourceElement.attrib.get('filename')\n if filename is not None and self.path is not None:\n sourcePath = os.path.abspath(os.path.join(os.path.dirname(self.path), filename))\n else:\n sourcePath = None\n sourceName = sourceElement.attrib.get('name')\n if sourceName is None:\n # add a temporary source name\n sourceName = \"temp_master.%d\" % (sourceCount)\n sourceObject = self.sourceDescriptorClass()\n sourceObject.path = sourcePath # absolute path to the ufo source\n sourceObject.filename = filename # path as it is stored in the document\n sourceObject.name = sourceName\n familyName = sourceElement.attrib.get(\"familyname\")\n if familyName is not None:\n sourceObject.familyName = familyName\n styleName = sourceElement.attrib.get(\"stylename\")\n if styleName is not None:\n sourceObject.styleName = styleName\n sourceObject.location = self.locationFromElement(sourceElement)\n isMuted = False\n for maskedElement in sourceElement.findall('.maskedfont'):\n # mute isn't stored in the sourceDescriptor, but we can store it in the lib\n if maskedElement.attrib.get('font') == \"1\":\n isMuted = True\n for libElement in sourceElement.findall('.provideLib'):\n if libElement.attrib.get('state') == '1':\n sourceObject.copyLib = True\n for groupsElement in sourceElement.findall('.provideGroups'):\n if groupsElement.attrib.get('state') == '1':\n sourceObject.copyGroups = True\n for infoElement in sourceElement.findall(\".provideInfo\"):\n if infoElement.attrib.get('state') == '1':\n sourceObject.copyInfo = True\n for featuresElement in sourceElement.findall(\".provideFeatures\"):\n if featuresElement.attrib.get('state') == '1':\n sourceObject.copyFeatures = True\n for glyphElement in sourceElement.findall(\".glyph\"):\n glyphName = glyphElement.attrib.get('name')\n if glyphName is None:\n continue\n if glyphElement.attrib.get('mute') == '1':\n sourceObject.mutedGlyphNames.append(glyphName)\n self.documentObject.sources.append(sourceObject)\n if isMuted:\n if not skateboardMutedSourcesKey in self.documentObject.lib:\n self.documentObject.lib[skateboardMutedSourcesKey] = []\n item = (sourceObject.filename, \"foreground\")\n self.documentObject.lib[skateboardMutedSourcesKey].append(item)\n\n def readInstances(self):\n for instanceCount, instanceElement in enumerate(self.root.findall(\".instance\")):\n instanceObject = self.instanceDescriptorClass()\n if instanceElement.attrib.get(\"familyname\"):\n instanceObject.familyName = instanceElement.attrib.get(\"familyname\")\n if instanceElement.attrib.get(\"stylename\"):\n instanceObject.styleName = instanceElement.attrib.get(\"stylename\")\n if instanceElement.attrib.get(\"styleMapFamilyName\"):\n instanceObject.styleMapFamilyName = instanceElement.attrib.get(\"styleMapFamilyName\")\n if instanceElement.attrib.get(\"styleMapStyleName\"):\n instanceObject.styleMapStyleName = instanceElement.attrib.get(\"styleMapStyleName\")\n if instanceElement.attrib.get(\"styleMapFamilyName\"):\n instanceObject.styleMapFamilyName = instanceElement.attrib.get(\"styleMapFamilyName\")\n instanceObject.location = self.locationFromElement(instanceElement)\n instanceObject.filename = instanceElement.attrib.get('filename')\n for libElement in instanceElement.findall('.provideLib'):\n if libElement.attrib.get('state') == '1':\n instanceObject.lib = True\n for libElement in instanceElement.findall('.provideInfo'):\n if libElement.attrib.get('state') == '1':\n instanceObject.info = True\n self.documentObject.instances.append(instanceObject)\n\ndef sp3_to_designspace(sp3path, 
designspacePath=None):\n if designspacePath is None:\n designspacePath = sp3path.replace(\".sp3\", \".designspace\")\n doc = DesignSpaceDocument()\n reader = SuperpolatorReader(sp3path, doc)\n reader.read()\n doc.write(designspacePath)\n\n\nif __name__ == \"__main__\":\n\n def test_superpolator_testdoc1():\n # read superpolator_testdoc1.sp3\n # and test all the values\n testDoc = DesignSpaceDocument()\n testPath = \"../../Tests/spReader_testdocs/superpolator_testdoc1.sp3\"\n reader = SuperpolatorReader(testPath, testDoc)\n reader.read()\n\n # check the axes\n names = [a.name for a in reader.documentObject.axes]\n names.sort()\n assert names == ['grade', 'space', 'weight', 'width']\n tags = [a.tag for a in reader.documentObject.axes]\n tags.sort()\n assert tags == ['SPCE', 'grad', 'wdth', 'wght']\n\n # check the data items\n assert superpolatorDataLibKey in reader.documentObject.lib\n items = list(reader.documentObject.lib[superpolatorDataLibKey].items())\n items.sort()\n assert items == [('expandRules', False), ('horizontalPreviewAxis', 'width'), ('includeLegacyRules', False), ('instancefolder', 'instances'), ('keepWorkFiles', True), ('lineInverted', True), ('lineStacked', 'lined'), ('lineViewFilled', True), ('outputFormatUFO', 3.0), ('previewtext', 'VA'), ('roundGeometry', False), ('verticalPreviewAxis', 'weight')]\n\n # check the sources\n for sd in reader.documentObject.sources:\n assert sd.familyName == \"MutatorMathTest_SourceFamilyName\"\n if sd.styleName == \"Default\":\n assert sd.location == {'width': 0.0, 'weight': 0.0, 'space': 0.0, 'grade': -0.5}\n assert sd.copyLib == True\n assert sd.copyGroups == True\n assert sd.copyInfo == True\n assert sd.copyFeatures == True\n elif sd.styleName == \"TheOther\":\n assert sd.location == {'width': 0.0, 'weight': 1000.0, 'space': 0.0, 'grade': -0.5}\n assert sd.copyLib == False\n assert sd.copyGroups == False\n assert sd.copyInfo == False\n assert sd.copyFeatures == False\n\n # check the instances\n for nd in reader.documentObject.instances:\n assert nd.familyName == \"MutatorMathTest_InstanceFamilyName\"\n if nd.styleName == \"AWeightThatILike\":\n assert nd.location == {'width': 133.152174, 'weight': 723.981097, 'space': 0.0, 'grade': -0.5}\n assert nd.filename == \"instances/MutatorMathTest_InstanceFamilyName-AWeightThatILike.ufo\"\n assert nd.styleMapFamilyName == None\n assert nd.styleMapStyleName == None\n if nd.styleName == \"wdth759.79_SPCE0.00_wght260.72\":\n # note the converted anisotropic location in the width axis.\n assert nd.location == {'grade': -0.5, 'width': 500.0, 'weight': 260.7217, 'space': 0.0}\n assert nd.filename == \"instances/MutatorMathTest_InstanceFamilyName-wdth759.79_SPCE0.00_wght260.72.ufo\"\n assert nd.styleMapFamilyName == \"StyleMappedFamily\"\n assert nd.styleMapStyleName == \"bold\"\n\n # check the rules\n for rd in reader.documentObject.rules:\n assert rd.name == \"width: < 500.0\"\n assert len(rd.conditionSets) == 1\n assert rd.subs == [('I', 'I.narrow')]\n for conditionSet in rd.conditionSets:\n for cd in conditionSet:\n if cd['name'] == \"width\":\n assert cd == {'minimum': None, 'maximum': 500.0, 'name': 'width'}\n if cd['name'] == \"grade\":\n assert cd == {'minimum': 0.0, 'maximum': 500.0, 'name': 'grade'}\n\n\n testDoc.write(testPath.replace(\".sp3\", \"_output_roundtripped.designspace\"))\n\n def test_testDocs():\n # read the test files and convert them\n # no tests\n root = \"../../Tests/spReader_testdocs/test*.sp3\"\n for path in glob.glob(root):\n sp3_to_designspace(path)\n\n 
test_superpolator_testdoc1()\n #test_testDocs()" }, { "alpha_fraction": 0.5940027236938477, "alphanum_fraction": 0.6247978210449219, "avg_line_length": 33.79220962524414, "blob_id": "af4a6d9162cf41427ca7247c48d6b95b674d7ed7", "content_id": "cf590533410bb2cf15f98bbbde70d61b89331a99", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16074, "license_type": "permissive", "max_line_length": 136, "num_lines": 462, "path": "/Tests/tests.py", "repo_name": "punchcutter/ufoProcessor", "src_encoding": "UTF-8", "text": "# standalone test\nimport shutil\nimport os\nimport defcon.objects.font\nimport fontParts.fontshell.font\nimport logging\nfrom ufoProcessor import *\n\n\n# new place for ufoProcessor tests.\n# Run in regular python of choice, not ready for pytest just yet. \n# You may ask \"why not?\" - you may ask indeed.\n\n# make the tests w ork with defcon as well as fontparts\n\ndef addExtraGlyph(font, name, s=200):\n font.newGlyph(name)\n g = font[name]\n p = g.getPen()\n p.moveTo((0,0))\n p.lineTo((s,0))\n p.lineTo((s,s))\n p.lineTo((0,s))\n p.closePath()\n g.width = s\n\ndef addGlyphs(font, s, addSupportLayer=True):\n # we need to add the glyphs\n step = 0\n uni = 95\n for n in ['glyphOne', 'glyphTwo', 'glyphThree', 'glyphFour', 'glyphFive']:\n font.newGlyph(n)\n g = font[n]\n p = g.getPen()\n p.moveTo((0,0))\n p.lineTo((s,0))\n p.lineTo((s,s))\n p.lineTo((0,s))\n p.closePath()\n g.move((0,s+step))\n g.width = s\n g.unicode = uni\n uni += 1\n step += 50\n for n, w in [('wide', 800), ('narrow', 100)]:\n font.newGlyph(n)\n g = font[n]\n p = g.getPen()\n p.moveTo((0,0))\n p.lineTo((w,0))\n p.lineTo((w,font.info.ascender))\n p.lineTo((0,font.info.ascender))\n p.closePath()\n g.width = w\n\n if addSupportLayer:\n font.newLayer('support')\n layer = font.layers['support']\n layer.newGlyph('glyphFive')\n layer.newGlyph('glyphOne') # add an empty glyph to see how it is treated\n lg = layer['glyphFive']\n p = lg.getPen()\n w = 10\n y = -400\n p.moveTo((0,y))\n p.lineTo((s,y))\n p.lineTo((s,y+100))\n p.lineTo((0,y+100))\n p.closePath()\n lg.width = s\n\n font.newGlyph(\"wide.component\")\n g = font[\"wide.component\"]\n comp = g.instantiateComponent()\n comp.baseGlyph = \"wide\"\n comp.offset = (0,0)\n g.appendComponent(comp)\n g.width = font['wide'].width\n font.newGlyph(\"narrow.component\")\n g = font[\"narrow.component\"]\n comp = g.instantiateComponent()\n comp.baseGlyph = \"narrow\"\n comp.offset = (0,0)\n g.appendComponent(comp)\n g.width = font['narrow'].width\n uniValue = 200\n for g in font:\n g.unicode = uniValue\n uniValue += 1\n\n\ndef fillInfo(font):\n font.info.unitsPerEm = 1000\n font.info.ascender = 800\n font.info.descender = -200\n\ndef _create_parent_dir(ufo_path):\n \"\"\"\n Creates the parent directory where the UFO will be saved, in case it\n doesn't exist already. 
This is required because fontTools.ufoLib no\n longer calls os.makedirs.\n \"\"\"\n directory = os.path.dirname(os.path.normpath(ufo_path))\n if directory and not os.path.exists(directory):\n os.makedirs(directory)\n\ndef _makeTestFonts(rootPath):\n \"\"\" Make some test fonts that have the kerning problem.\"\"\"\n path1 = os.path.join(rootPath, \"masters\", \"geometryMaster1.ufo\")\n path2 = os.path.join(rootPath, \"masters\", \"geometryMaster2.ufo\")\n path3 = os.path.join(rootPath, \"instances\", \"geometryInstance%3.3f.ufo\")\n path4 = os.path.join(rootPath, \"anisotropic_instances\", \"geometryInstanceAnisotropic1.ufo\")\n path5 = os.path.join(rootPath, \"anisotropic_instances\", \"geometryInstanceAnisotropic2.ufo\")\n path6 = os.path.join(rootPath, \"instances\", \"extrapolate\", \"geometryInstance%s.ufo\")\n f1 = Font()\n fillInfo(f1)\n addGlyphs(f1, 100, addSupportLayer=False)\n addExtraGlyph(f1, \"extra.glyph.for.neutral\")\n f1.features.text = u\"# features text from master 1\"\n f2 = Font()\n fillInfo(f2)\n addGlyphs(f2, 500, addSupportLayer=True)\n addExtraGlyph(f2, \"extra.glyph.for.master2\")\n f2.features.text = u\"# features text from master 2\"\n f1.info.ascender = 400\n f1.info.descender = -200\n f1.info.xHeight = 200\n f1.info.capHeight = 400\n f2.info.ascender = 600\n f2.info.descender = -100\n f2.info.xHeight = 200\n f2.info.capHeight = 600\n f1.info.copyright = u\"This is the copyright notice from master 1\"\n f2.info.copyright = u\"This is the copyright notice from master 2\"\n f1.lib['ufoProcessor.test.lib.entry'] = \"Lib entry for master 1\"\n f2.lib['ufoProcessor.test.lib.entry'] = \"Lib entry for master 2\"\n \n f1.info.postscriptBlueValues = [100, 110]\n f2.info.postscriptBlueValues = [120, 125]\n f1.info.postscriptBlueFuzz = 0\n f2.info.postscriptBlueFuzz = 1\n f1.info.postscriptBlueScale = 0.11 # should not round\n f1.info.postscriptBlueScale = 0.22\n\n f1.info.openTypeHheaAscender = 1036\n f1.info.openTypeHheaDescender = -335\n f1.info.openTypeOS2TypoAscender = 730\n f1.info.openTypeOS2TypoDescender = -270\n f1.info.openTypeOS2WinAscent = 1036\n f1.info.openTypeOS2WinDescent = 335\n\n f1.groups[\"public.kern1.groupA\"] = ['glyphOne', 'glyphTwo']\n f1.groups[\"public.kern2.groupB\"] = ['glyphThree', 'glyphFour']\n f2.groups.update(f1.groups)\n\n f1.kerning[('public.kern1.groupA', 'public.kern2.groupB')] = -100\n f2.kerning[('public.kern1.groupA', 'public.kern2.groupB')] = -200\n\n f1.kerning[('glyphOne', 'glyphOne')] = -100\n f2.kerning[('glyphOne', 'glyphOne')] = 0\n f1.kerning[('glyphOne', 'glyphThree')] = 10\n f1.kerning[('glyphOne', 'glyphFour')] = 10\n # exception\n f2.kerning[('glyphOne', 'glyphThree')] = 1\n f2.kerning[('glyphOne', 'glyphFour')] = 0\n print([l.name for l in f1.layers], [l.name for l in f2.layers])\n\n _create_parent_dir(path1)\n _create_parent_dir(path2)\n f1.save(path1, 3)\n f2.save(path2, 3)\n return path1, path2, path3, path4, path5, path6\n\ndef _makeSwapFonts(rootPath):\n \"\"\" Make some test fonts that have the kerning problem.\"\"\"\n path1 = os.path.join(rootPath, \"Swap.ufo\")\n path2 = os.path.join(rootPath, \"Swapped.ufo\")\n f1 = Font()\n fillInfo(f1)\n addGlyphs(f1, 100)\n f1.features.text = u\"# features text from master 1\"\n f1.info.ascender = 800\n f1.info.descender = -200\n f1.kerning[('glyphOne', 'glyphOne')] = -10\n f1.kerning[('glyphTwo', 'glyphTwo')] = 10\n f1.save(path1, 2)\n return path1, path2\n\nclass DesignSpaceProcessor_using_defcon(DesignSpaceProcessor):\n def _instantiateFont(self, path):\n return 
defcon.objects.font.Font(path)\n\nclass DesignSpaceProcessor_using_fontparts(DesignSpaceProcessor):\n def _instantiateFont(self, path):\n return fontParts.fontshell.font.RFont(path)\n\ndef _makeTestDocument(docPath, useVarlib=True, useDefcon=True):\n # make the test fonts and a test document\n if useVarlib:\n extension = \"varlib\"\n else:\n extension = \"mutator\"\n testFontPath = os.path.join(os.path.dirname(docPath), \"automatic_testfonts_%s\" % extension)\n print(\"\\ttestFontPath:\", testFontPath)\n m1, m2, i1, anisotropicInstancePath1, anisotropicInstancePath2, extrapolatePath = _makeTestFonts(testFontPath)\n if useDefcon:\n d = DesignSpaceProcessor_using_defcon(useVarlib=useVarlib)\n else:\n d = DesignSpaceProcessor_using_fontparts(useVarlib=useVarlib)\n print(\"\\td\", d, type(d))\n a = AxisDescriptor()\n a.name = \"pop\"\n a.minimum = 0\n a.maximum = 1000\n a.default = 0\n a.tag = \"pop*\"\n a.map = [(0,10),(500,250),(1000,990)]\n d.addAxis(a)\n\n s1 = SourceDescriptor()\n s1.path = m1\n s1.location = dict(pop=a.map_forward(a.default))\n s1.name = \"test.master.1\"\n s1.copyInfo = True\n s1.copyFeatures = True\n s1.copyLib = True\n d.addSource(s1)\n\n s2 = SourceDescriptor()\n s2.path = m2\n s2.location = dict(pop=1000)\n s2.name = \"test.master.2\"\n s2.muteKerning = True\n d.addSource(s2)\n\n s3 = SourceDescriptor()\n s3.path = m2\n s3.location = dict(pop=500)\n s3.name = \"test.master.support.1\"\n s3.layerName = \"support\"\n d.addSource(s3)\n\n s4 = SourceDescriptor()\n s4.path = \"missing.ufo\"\n s4.location = dict(pop=600)\n s4.name = \"test.missing.master\"\n d.addSource(s4)\n\n s5 = SourceDescriptor()\n s5.path = m2\n s5.location = dict(pop=620)\n s5.name = \"test.existing.ufo_missing.layer\"\n s5.layerName = \"missing.layer\"\n d.addSource(s5)\n\n d.findDefault()\n # make sure the default location is bend and unbend as we want.\n assert d.newDefaultLocation().get('pop') == 0\n assert d.newDefaultLocation(bend=True).get('pop') == 10\n\n steps = 6\n for counter in range(steps):\n factor = counter / steps \n i = InstanceDescriptor()\n v = a.minimum+factor*(a.maximum-a.minimum)\n i.path = i1 % v\n i.familyName = \"TestFamily\"\n i.styleName = \"TestStyle_pop%3.3f\" % (v)\n i.name = \"%s-%s\" % (i.familyName, i.styleName)\n i.location = dict(pop=v)\n i.info = True\n i.kerning = True\n i.postScriptFontName = \"TestFamily PSName %s\" % i.styleName\n if counter == 2:\n i.glyphs['glyphTwo'] = dict(name=\"glyphTwo\", mute=True)\n i.copyLib = True\n if counter == 2:\n i.glyphs['narrow'] = dict(instanceLocation=dict(pop=400), unicodes=[0x123, 0x124, 0x125])\n d.addInstance(i)\n\n # add extrapolatiing location\n i = InstanceDescriptor()\n i.path = extrapolatePath % \"TestStyle_Extrapolate\"\n print('i.path', i.path)\n i.familyName = \"TestFamily\"\n i.styleName = \"TestStyle_Extrapolate\"\n i.name = \"%s-%s\" % (i.familyName, i.styleName)\n i.location = dict(pop=3000)\n i.info = True\n i.kerning = True\n d.addInstance(i)\n\n # add anisotropic locations\n i = InstanceDescriptor()\n i.path = anisotropicInstancePath1\n i.familyName = \"TestFamily\"\n i.styleName = \"TestStyle_pop_anisotropic1\"\n i.name = \"%s-%s\" % (i.familyName, i.styleName)\n i.location = dict(pop=(1000, 0))\n i.info = True\n i.kerning = True\n d.addInstance(i)\n\n i = InstanceDescriptor()\n i.path = anisotropicInstancePath2\n i.familyName = \"TestFamily\"\n i.styleName = \"TestStyle_pop_anisotropic2\"\n i.name = \"%s-%s\" % (i.familyName, i.styleName)\n i.location = dict(pop=(0, 1000))\n i.info = True\n i.kerning = 
True\n d.addInstance(i)\n\n # add data to the document lib\n d.lib['ufoprocessor.testdata'] = dict(pop=500, name=\"This is a named location, stored in the document lib.\")\n\n d.write(docPath)\n\ndef _testGenerateInstances(docPath, useVarlib=True, useDefcon=True, roundGeometry=False):\n # execute the test document\n if useDefcon:\n d = DesignSpaceProcessor_using_defcon(useVarlib=useVarlib)\n else:\n d = DesignSpaceProcessor_using_fontparts(useVarlib=useVarlib)\n d.read(docPath)\n d.loadFonts()\n print('---', d.newDefaultLocation())\n d.roundGeometry = roundGeometry\n objectFlavor = [type(f).__name__ for f in d.fonts.values()][0]\n print(\"objectFlavor\", objectFlavor)\n d.generateUFO()\n if d.problems:\n print(\"log:\")\n for p in d.problems:\n print(\"\\t\",p)\n\ndef testSwap(docPath):\n srcPath, dstPath = _makeSwapFonts(os.path.dirname(docPath))\n f = Font(srcPath)\n swapGlyphNames(f, \"narrow\", \"wide\")\n f.info.styleName = \"Swapped\"\n f.save(dstPath)\n # test the results in newly opened fonts\n old = Font(srcPath)\n new = Font(dstPath)\n assert new.kerning.get((\"narrow\", \"narrow\")) == old.kerning.get((\"wide\",\"wide\"))\n assert new.kerning.get((\"wide\", \"wide\")) == old.kerning.get((\"narrow\",\"narrow\"))\n # after the swap these widths should be the same\n assert old['narrow'].width == new['wide'].width\n assert old['wide'].width == new['narrow'].width\n # The following test may be a bit counterintuitive:\n # the rule swaps the glyphs, but we do not want glyphs that are not\n # specifically affected by the rule to *appear* any different.\n # So, components have to be remapped. \n assert new['wide.component'].components[0].baseGlyph == \"narrow\"\n assert new['narrow.component'].components[0].baseGlyph == \"wide\"\n\ndef testAxisMuting():\n d = DesignSpaceProcessor_using_defcon(useVarlib=True)\n\n a = AxisDescriptor()\n a.name = \"pop\"\n a.minimum = 0\n a.maximum = 1000\n a.default = 0\n a.tag = \"pop*\"\n d.addAxis(a)\n\n a = AxisDescriptor()\n a.name = \"snap\"\n a.minimum = 100\n a.maximum = 200\n a.default = 150\n a.tag = \"snap\"\n d.addAxis(a)\n\n a = AxisDescriptor()\n a.name = \"crackle\"\n a.minimum = -1\n a.maximum = 1\n a.default = 0\n a.tag = \"krak\"\n d.addAxis(a)\n\n shouldIgnore, loc = d.filterThisLocation(dict(snap=150, crackle=0, pop=0), [])\n assert shouldIgnore == False\n assert loc == {'snap': 150, 'crackle': 0, 'pop': 0}\n\n shouldIgnore, loc = d.filterThisLocation(dict(snap=150, crackle=0, pop=0), ['pop'])\n assert shouldIgnore == False\n assert loc == {'snap': 150, 'crackle': 0}\n\n shouldIgnore, loc = d.filterThisLocation(dict(snap=150, crackle=0, pop=1), ['pop'])\n assert shouldIgnore == True\n assert loc == {'snap': 150, 'crackle': 0}\n\n shouldIgnore, loc = d.filterThisLocation(dict(snap=150, crackle=0, pop=0), ['pop', 'crackle'])\n assert shouldIgnore == False\n assert loc == {'snap': 150}\n\n shouldIgnore, loc = d.filterThisLocation(dict(snap=150, crackle=0, pop=1), ['pop', 'crackle', 'snap'])\n assert shouldIgnore == True\n assert loc == {}\n\n shouldIgnore, loc = d.filterThisLocation(dict(snap=150, crackle=0, pop=0), ['one', 'two', 'three'])\n assert shouldIgnore == False\n assert loc == {'snap': 150, 'crackle': 0, 'pop': 0}\n\n shouldIgnore, loc = d.filterThisLocation(dict(snap=150, crackle=0, pop=1), ['one', 'two', 'three'])\n assert shouldIgnore == False\n assert loc == {'snap': 150, 'crackle': 0, 'pop': 1}\n \ndef testUnicodes(docPath, useVarlib=True):\n # after executing testSwap there should be some test fonts\n # let's check if the 
unicode values for glyph \"narrow\" arrive at the right place.\n d = DesignSpaceProcessor(useVarlib=useVarlib)\n d.read(docPath)\n for instance in d.instances:\n if os.path.exists(instance.path):\n f = Font(instance.path)\n print(\"instance.path\", instance.path)\n print(\"instance.name\", instance.name, \"f['narrow'].unicodes\", f['narrow'].unicodes)\n if instance.name == \"TestFamily-TestStyle_pop1000.000\":\n assert f['narrow'].unicodes == [291, 292, 293]\n else:\n assert f['narrow'].unicodes == [207]\n else:\n print(\"Missing test font at %s\" % instance.path)\n\nselfTest = True\nif selfTest:\n for extension in ['mutator', 'varlib']:\n for objectFlavor in ['defcon', 'fontparts']:\n for roundGeometry in [True, False]:\n # which object model to use for **executuing** the designspace.\n # all the objects in **this test** are defcon. \n\n print(\"\\n\\nRunning the test with \", extension, \"and\", objectFlavor, \"roundGeometry:\", roundGeometry)\n print(\"-\"*40)\n USEVARLIBMODEL = extension == 'varlib'\n if roundGeometry:\n roundingTag = \"_rounded_geometry\"\n else:\n roundingTag = \"\"\n testRoot = os.path.join(os.getcwd(), \"automatic_testfonts_%s_%s%s\" % (extension, objectFlavor, roundingTag))\n print(\"\\ttestRoot\", testRoot)\n if os.path.exists(testRoot):\n shutil.rmtree(testRoot)\n docPath = os.path.join(testRoot, \"automatic_test.designspace\")\n print(\"\\tdocPath\", docPath)\n print(\"-\"*40)\n print(\"Generate document, masters\")\n _makeTestDocument(docPath, useVarlib=USEVARLIBMODEL, useDefcon=objectFlavor==\"defcon\")\n print(\"-\"*40)\n print(\"Generate instances\", docPath)\n _testGenerateInstances(docPath, useVarlib=USEVARLIBMODEL, useDefcon=objectFlavor==\"defcon\", roundGeometry=roundGeometry)\n testSwap(docPath)\n #_makeTestDocument(docPath, useVarlib=USEVARLIBMODEL, useDefcon=objectFlavor==\"defcon\")\n #_testGenerateInstances(docPath, useVarlib=USEVARLIBMODEL, useDefcon=objectFlavor==\"defcon\")\n\n\ntestAxisMuting()\n" }, { "alpha_fraction": 0.6830168962478638, "alphanum_fraction": 0.7035865187644958, "avg_line_length": 29.580644607543945, "blob_id": "802e9cd8af030c6f963eea431fbf8344c37e498b", "content_id": "3f6490c386e88feae016f47ec953e7b0f1157af9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1896, "license_type": "permissive", "max_line_length": 114, "num_lines": 62, "path": "/Tests/kerningTest.py", "repo_name": "punchcutter/ufoProcessor", "src_encoding": "UTF-8", "text": "from fontMath.mathKerning import MathKerning\n\nimport fontMath.mathKerning\nfrom defcon.objects.font import Font\nfrom fontParts.fontshell import RFont\nfrom ufoProcessor.varModels import VariationModelMutator\nfrom mutatorMath.objects.mutator import buildMutator, Location\nfrom fontTools.designspaceLib import AxisDescriptor\n\n# kerning exception value. 
Different results for 1 and 0\nvalue = 0\n\n#f = Font()\nf = RFont()\t\t# doesn't make a difference\nf.groups[\"public.kern1.groupA\"] = ['one', 'Bee']\nf.groups[\"public.kern2.groupB\"] = ['two', 'Three']\nf.kerning[('public.kern1.groupA', 'public.kern2.groupB')] = -100\nf.kerning[(\"one\", \"two\")] = value\n\nm = MathKerning(f.kerning, f.groups)\nprint(\"mathKerning object items:\", m.items())\nprint(\"\\tpair\", ('public.kern1.groupA', 'public.kern2.groupB'), m[('public.kern1.groupA', 'public.kern2.groupB')])\nprint(\"\\tpair\", ('public.kern1.groupA', 'two'), m[('public.kern1.groupA', 'two')])\nprint(\"\\tpair\", ('one', 'public.kern2.groupB'), m[('one', 'public.kern2.groupB')])\nprint(\"\\tpair\", ('one', 'two'), m[('one', 'two')])\n\nitems = [(Location(w=0), m), (Location(w=1), m)]\na = AxisDescriptor()\na.name = \"w\"\na.minimum = 0\na.default = 0\na.maximum = 1\n\n# process with varlib.model\nmut1 = VariationModelMutator(items, [a])\nm1i = mut1.makeInstance(dict(w=1))\nprint(\"\\n#varlib\")\nprint(m1i.items())\n\n# process with mutator\nbias, mut2 = buildMutator(items)\nm2i = mut2.makeInstance(dict(w=1))\nprint(\"\\n#mutator\")\nprint(m2i.items())\n\n# process with the same mathematical operations on a naked mathKerning object\nv = None\ndeltas = [m, m]\nscalars = [1.0, 1.0]\nassert len(deltas) == len(scalars)\nfor i,(delta,scalar) in enumerate(zip(deltas, scalars)):\n\tif not scalar: continue\n\tcontribution = delta * scalar\n\tif v is None:\n\t\tv = contribution\n\telse:\n\t\tv += contribution\nprint(\"\\n#doing the math that varlib does\")\nprint(v.items())\n\nprint(m.groups())\nprint((m*2.0).groups())\n" }, { "alpha_fraction": 0.6476964950561523, "alphanum_fraction": 0.6747967600822449, "avg_line_length": 29.58333396911621, "blob_id": "6fa6f4ce43227d108ac5b183d3afbb1410e57077", "content_id": "fd9e2d1acbce33b0db1cc8a02503d2d55d44d170", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 369, "license_type": "permissive", "max_line_length": 64, "num_lines": 12, "path": "/Tests/mathKerningTest.py", "repo_name": "punchcutter/ufoProcessor", "src_encoding": "UTF-8", "text": "from fontMath.mathKerning import MathKerning\nfrom defcon.objects.font import Font\n\nf = Font()\nf.groups[\"public.kern1.groupA\"] = ['one', 'Bee']\nf.groups[\"public.kern2.groupB\"] = ['two', 'Three']\nf.kerning[('public.kern1.groupA', 'public.kern2.groupB')] = -100\nf.kerning[('one', 'two')] = 0\nm = MathKerning(f.kerning, f.groups)\n\nprint(m.items())\nprint((m*1.0).items())\n\n\n" } ]
6
naveenbhurli/bingWallpaperOfTheDay
https://github.com/naveenbhurli/bingWallpaperOfTheDay
09e20fc09dd2e5738d14f48f0d9f1396caff79e0
c013637b5c8892c75f2254e3bc8d3bded19a232f
b981e84a547c768ffed4391557c847b15091b306
refs/heads/master
2021-06-26T07:50:22.829075
2017-09-15T15:43:33
2017-09-15T15:43:33
103,673,252
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6381215453147888, "alphanum_fraction": 0.6484806537628174, "avg_line_length": 30.200000762939453, "blob_id": "9c1d793b39a7bb0c4ceef61528fe4a887d45f651", "content_id": "51eaa37868882c4a31b471901e6e8e0ed693e175", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2896, "license_type": "no_license", "max_line_length": 118, "num_lines": 90, "path": "/setWallpaper.py", "repo_name": "naveenbhurli/bingWallpaperOfTheDay", "src_encoding": "UTF-8", "text": "\"\"\"\r\n\r\nAuthor: Naveen Bhurli\r\n\r\nScript that fetches Bing Photo of the day, and set it as desktop wallpaper.\r\nThis script runs in the background as soon as the OS boots up and starts working once it is connected to the internet.\r\nIt uses simple urllib.request, json and winreg modules that are already part of Python base installation.\r\nThe script fetches Bing wallpaper of the day, saves it onto the local system and sets the wallpaper.\r\n\r\n\"\"\"\r\n\r\n\r\n# Execute this command after setting wallpaper \"RUNDLL32.EXE USER32.DLL,UpdatePerUserSystemParameters 1, True\"\r\n# urlretrieve\r\n\r\nfrom urllib.request import *\r\nimport json\r\nfrom winreg import *\r\nimport os,datetime,re,time\r\nimport subprocess\r\n\r\n\r\n# method to connect to the Internet and fetches photo of the day.\r\n\r\ndef get_image_from_bing():\r\n present_day = datetime.datetime.today()\r\n bingWallpaperUrl = \"http://www.bing.com/HPImageArchive.aspx?format=js&idx=0&n=1&mkt=en-IN\"\r\n\r\n with urlopen(bingWallpaperUrl) as content:\r\n content = urlopen(bingWallpaperUrl)\r\n\r\n if content.getcode() == 200:\r\n html = json.load(content)\r\n image = \"https://www.bing.com\" + html['images'][0]['url']\r\n image_path = html['images'][0]['url']\r\n image_name = re.search(r'/rb/([^/]+)', image_path).group(1)\r\n\r\n bing_wallpaper_folder = create_folder_for_images()\r\n\r\n current_day_image = bing_wallpaper_folder+\"\\\\\"+image_name\r\n\r\n\r\n\r\n\r\n urlretrieve(image, current_day_image)\r\n return current_day_image\r\n\r\n\r\n\r\n else:\r\n print(\"Connectivity problem\")\r\n\r\ndef create_folder_for_images():\r\n user_profile = os.environ['userprofile']\r\n bing_wallpaper_folder = user_profile + \"\\\\Documents\\\\BingPhotoOfTheDay\"\r\n if not os.path.exists(bing_wallpaper_folder):\r\n os.makedirs(bing_wallpaper_folder)\r\n return bing_wallpaper_folder\r\n\r\n# wallpaper path = HKEY_CURRENT_USER\\Control Panel\\Desktop\\WallPaper\r\n\r\ndef set_wallpaper(image):\r\n Registry = ConnectRegistry(None, HKEY_CURRENT_USER)\r\n raw_key = OpenKey(Registry, \"Control Panel\\Desktop\", 0, KEY_ALL_ACCESS)\r\n # query_reg_value = QueryValueEx(raw_key, \"WallPaper\")\r\n # print(query_reg_value)\r\n SetValueEx(raw_key, \"WallPaper\", 1, REG_SZ, image)\r\n CloseKey(Registry)\r\n\r\n\r\n\r\n# def check_for_existing_image():\r\n#\r\n# if os.path.exists()\r\n\r\n\r\n# set_wallpaper(get_image_from_bing())\r\n\r\n# create_folder_for_images()\r\nbing = \"https://www.bing.com\"\r\nwith urlopen(bing) as conn:\r\n if conn.getcode() == 200:\r\n\r\n while True:\r\n current_day_image = get_image_from_bing()\r\n if not os.path.isfile(current_day_image):\r\n set_wallpaper(current_day_image)\r\n for refresh in range(1, 15):\r\n subprocess.call([\"RUNDLL32.EXE\", \"USER32.DLL\", \"UpdatePerUserSystemParameters\", \"1\", \"True\"])\r\n time.sleep(3600)" } ]
1
pate0793/Python_lab3
https://github.com/pate0793/Python_lab3
ce83cb5d8d3824eed1f0169a1a91ad309b3994aa
e3e2fdc236b8f618bf7600d9625432092dc12509
bc01103e2f169ffc63d8892c1deb6957f6abdb09
refs/heads/master
2020-07-29T01:20:22.532525
2019-09-23T15:21:23
2019-09-23T15:21:23
209,613,626
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6480447053909302, "alphanum_fraction": 0.6536312699317932, "avg_line_length": 17.88888931274414, "blob_id": "531d07a287a30819498ce30f5941a88c8172443d", "content_id": "68a7e919505aa9ba79edfc4a5b30821889531ec9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 179, "license_type": "no_license", "max_line_length": 49, "num_lines": 9, "path": "/areaOfCircle.py", "repo_name": "pate0793/Python_lab3", "src_encoding": "UTF-8", "text": "import math\r\n\r\nradius = input(\"Please Enter radius>>>>\")\r\nuserRadius = int(radius)\r\n\r\nareaCircle = math.pi*(userRadius**2)\r\n\r\n\r\nprint(\"Your Area of Circle is >>>>>>\",areaCircle)\r\n" }, { "alpha_fraction": 0.7153284549713135, "alphanum_fraction": 0.7153284549713135, "avg_line_length": 28.44444465637207, "blob_id": "90dd17fa9a4a8d6340482877509f6ed1119c8a4a", "content_id": "7244a774bb5cedeb68a858faa38210abf965018f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 274, "license_type": "no_license", "max_line_length": 65, "num_lines": 9, "path": "/carMPG.py", "repo_name": "pate0793/Python_lab3", "src_encoding": "UTF-8", "text": "miles = input(\"Please Enter Your Car Miles>>>>\")\r\nuserMiles = int(miles)\r\n\r\ngallons = input(\"Please Enter The Gallons That You Have Used\")\r\nuserGallons = float(gallons)\r\n\r\nuserMPG = userMiles/userGallons\r\n\r\nprint(\"This is your car MPG according to your input>>>>\",userMPG)\r\n" }, { "alpha_fraction": 0.6717948913574219, "alphanum_fraction": 0.692307710647583, "avg_line_length": 25.85714340209961, "blob_id": "9eccc925f7a86bbfb612b18f9380ac4f5c2b4f7a", "content_id": "459953acc9b35c470c819e14ce6463d728807d33", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 195, "license_type": "no_license", "max_line_length": 72, "num_lines": 7, "path": "/degreeFtoC.py", "repo_name": "pate0793/Python_lab3", "src_encoding": "UTF-8", "text": "ferenheit = input(\"Please Enter Fahrenheit To Convert Into Celsius>>>>\")\r\n\r\nuserFerenheit = int(ferenheit)\r\n\r\ncelsius = (userFerenheit - 32)*(5/9)\r\n\r\nprint(\"Your Celsius Degree is>>>>\",celsius)\r\n" } ]
3
capacitr/cap-blog
https://github.com/capacitr/cap-blog
93aaeacae357aef616ab53dd733d6a108c510a1d
5763e83c13b7afda4236269629bf02f4599ae142
a56c0e37afa62a9a2c16c17de66d273e4e52e19a
refs/heads/master
2021-01-10T12:59:04.529168
2013-03-06T16:24:53
2013-03-06T16:24:53
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6682464480400085, "alphanum_fraction": 0.6966824531555176, "avg_line_length": 29, "blob_id": "93f8cfa3aea2b7f27f0d834659d4cf68fa16f780", "content_id": "0cbd50f2f38aee01268143259a59097b5dc8bcdc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 211, "license_type": "no_license", "max_line_length": 54, "num_lines": 7, "path": "/cap_blog/views.py", "repo_name": "capacitr/cap-blog", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, get_object_or_404\n\nimport models\n\ndef get_post(req, post_slug=None):\n p = get_object_or_404(models.Post, slug=post_slug)\n return render(req, 'post.html', {'post': p})\n\n" }, { "alpha_fraction": 0.6088560819625854, "alphanum_fraction": 0.6199262142181396, "avg_line_length": 21.5, "blob_id": "43e2b6c379721c22b7d53e7ba9b89dcaaff2f023", "content_id": "b1917d40d26688eb30434c578822aee4dfeed84a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 271, "license_type": "no_license", "max_line_length": 46, "num_lines": 12, "path": "/setup.py", "repo_name": "capacitr/cap-blog", "src_encoding": "UTF-8", "text": "from distutils.core import setup\n\nsetup(\n name='CapBlog',\n version='0.2.0',\n author='Patrick Aubin',\n author_email='[email protected]',\n packages=['cap_blog'],\n include_package_data=True,\n url='http://capacitr.com/',\n description='A simple django-based blog.',\n)\n\n" }, { "alpha_fraction": 0.6629213690757751, "alphanum_fraction": 0.6629213690757751, "avg_line_length": 24.285715103149414, "blob_id": "bca8aadd1ed86c8ab84cae887101b92e6c7df308", "content_id": "a65a9efbe6279442d9b161612a6108dbe76679eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 178, "license_type": "no_license", "max_line_length": 71, "num_lines": 7, "path": "/cap_blog/urls.py", "repo_name": "capacitr/cap-blog", "src_encoding": "UTF-8", "text": "from django.conf.urls.defaults import patterns, include, url\n\nimport views\n\nurlpatterns = patterns('',\n url(r'^(?P<post_slug>[\\w-]+)/$', views.get_post, name='blog_post'),\n)\n\n" }, { "alpha_fraction": 0.6049149632453918, "alphanum_fraction": 0.6309073567390442, "avg_line_length": 28.375, "blob_id": "b78c4f19cd14ae49d6774a5e668231f235187f93", "content_id": "2b195b3329215e2cf0526c405d1b668312856242", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2116, "license_type": "no_license", "max_line_length": 72, "num_lines": 72, "path": "/cap_blog/models.py", "repo_name": "capacitr/cap-blog", "src_encoding": "UTF-8", "text": "from django.db import models\n\nfrom thumbnail_works.fields import EnhancedImageField\n\nclass Tag(models.Model):\n\n tag = models.CharField(max_length=255)\n slug = models.SlugField(max_length=255, unique=True)\n\n def __unicode__(self):\n return self.tag\n\nclass Post(models.Model):\n date_created = models.DateTimeField(auto_now_add=True)\n\n author = models.ForeignKey('auth.User', editable=False)\n title = models.CharField(max_length=255)\n subtitle = models.CharField(max_length=255, blank=True)\n slug = models.SlugField(max_length=255, unique=True)\n\n image = EnhancedImageField(\n upload_to = 'uploads',\n blank=True,\n process_source = dict(\n size='940x500', sharpen=True, upscale=False, format='JPEG'),\n thumbnails = {\n 'avatar': dict(size='110x110'),\n 'wide_avatar': dict(size='202x70'),\n 'blog_avatar': dict(size='650x325'),\n }\n )\n\n body = 
models.TextField(blank=True, default=\"\")\n link = models.FileField(upload_to=\"uploads\", default=\"\", blank=True)\n tags = models.ManyToManyField(Tag, related_name=\"posts\")\n\n image_link = models.CharField(max_length=255, blank=True)\n date_time = models.CharField(max_length=255, blank=True)\n\n order = models.IntegerField(default=0, blank=True)\n\n publish = models.BooleanField(default=False)\n\n @property\n def show_tags(self):\n return \"\\n\".join([t.__unicode__() for t in self.tags.all()])\n\n def __unicode__(self):\n return \"%s\" % (self.title)\n\n @models.permalink\n def get_absolute_url(self):\n return ('blog_post', (), {'post_slug' : self.slug} )\n\n class Meta:\n ordering = [\"-date_created\"]\n\nCHOICES = (\n ('t', 'text'),\n ('i', 'image'),\n ('f', 'file')\n )\n\nclass Attribute(models.Model):\n post = models.ForeignKey('cap_blog.Post')\n name = models.CharField(max_length=100)\n slug = models.SlugField(max_length=100)\n value = models.CharField(max_length=255)\n attribute_type = models.CharField(max_length=1, choices=CHOICES)\n\n class Meta:\n unique_together = ('post', 'slug')\n\n" }, { "alpha_fraction": 0.6243550181388855, "alphanum_fraction": 0.6243550181388855, "avg_line_length": 23.820512771606445, "blob_id": "4ed66fc0e2d23e134ed22dd73848074e69e6d513", "content_id": "359c579d6df541b95ffe5ac03497883b203f39e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 969, "license_type": "no_license", "max_line_length": 74, "num_lines": 39, "path": "/cap_blog/admin.py", "repo_name": "capacitr/cap-blog", "src_encoding": "UTF-8", "text": "from django.contrib import admin\n\nimport models\n\nclass TagAdmin(admin.ModelAdmin):\n list_display = ['tag', 'slug']\n prepopulated_fields = {'slug' : ('tag',)}\n\n class Meta:\n model = models.Tag\n\nclass AttributeInline(admin.TabularInline):\n model = models.Attribute\n\nclass PostAdmin(admin.ModelAdmin):\n list_display = ['title', 'show_tags', 'publish']\n\n list_display = ['date_created', 'title', 'order', 'author', 'publish']\n list_editable = ['order', 'publish']\n\n inlines = [AttributeInline,]\n\n prepopulated_fields = {'slug' : ('title',)}\n\n def save_model(self, request, obj, form, change):\n if not change:\n try:\n obj.author = request.user\n except AttributeError:\n pass\n\n super(PostAdmin, self).save_model(request, obj, form, change)\n\n\n class Meta:\n model = models.Post\n\nadmin.site.register(models.Post, PostAdmin)\nadmin.site.register(models.Tag, TagAdmin)\n\n" }, { "alpha_fraction": 0.6068601608276367, "alphanum_fraction": 0.6114775538444519, "avg_line_length": 23.852458953857422, "blob_id": "1f100f17cc2eca27f280b29865aebcdc5b0f6a2a", "content_id": "af145bab0a07c1c224495ea0c1f4a54eaf1190a2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1516, "license_type": "no_license", "max_line_length": 77, "num_lines": 61, "path": "/cap_blog/templatetags/posts.py", "repo_name": "capacitr/cap-blog", "src_encoding": "UTF-8", "text": "from django import template\n\nregister = template.Library()\n\nfrom cap_blog.models import Post\n\[email protected]_tag\ndef get_posts_by_tag(tag_name=None):\n posts = list(Post.objects.filter(tags__tag__in=[tag_name], publish=True))\n return posts\n\[email protected]_tag\ndef get_posts():\n return Post.objects.filter(publish=True)\n\[email protected]_tag\ndef get_prev_post(tag_name=None, pk=None):\n posts = Post.objects.filter(tags__tag__in=[tag_name], publish=True)\n prev = None\n 
for post in posts:\n if post.pk == pk:\n return prev\n prev = post\n\n return None\n\[email protected]_tag\ndef get_next_post(tag_name=None, pk=None):\n posts = Post.objects.filter(tags__tag__in=[tag_name], publish=True)\n prev = None\n for post in posts:\n if prev and prev.pk == pk:\n return post\n prev = post\n return None\n\ndef chunks(l, n):\n return [l[i:i+n] for i in range(0, len(l), n)]\n\[email protected]\ndef hyphenate(s, l):\n str_chunks = chunks(s, l)\n i = 0\n for chunk in str_chunks:\n try:\n if chunk[len(chunk)-1] != \" \" and str_chunks[i+1][0] != \" \":\n str_chunks[i] = chunk + \"-\"\n except IndexError:\n pass\n\n i += 1\n str_chunks = \"\".join(str_chunks)\n return str_chunks\n\[email protected]_tag\ndef get_latest_post(tag_name=None):\n try:\n post = Post.objects.filter(tags__tag__in=[tag_name], publish=True)[0]\n except IndexError:\n post = None\n return post\n" } ]
6
embrown/vcs
https://github.com/embrown/vcs
ebf2e20079d6cb0f3706a0fa7c12acd0e365e855
b76a435b9b74b45b247c1cc4c5a1628b5a75c98f
3b7bfadb834dfbc3cf67b6ba7085b125325a52fa
refs/heads/master
2017-12-31T23:22:32.699683
2016-09-21T15:53:10
2016-09-21T15:53:10
68,832,602
0
0
null
2016-09-21T15:45:02
2016-09-15T15:25:40
2016-09-20T18:03:17
null
[ { "alpha_fraction": 0.6437071561813354, "alphanum_fraction": 0.653773844242096, "avg_line_length": 30.64412498474121, "blob_id": "7c46d4f093f7186003ec6787313e50714f8f439b", "content_id": "9b774b3e89196d8d2bd7e84a1ab4ff9eca1602c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 85629, "license_type": "no_license", "max_line_length": 120, "num_lines": 2706, "path": "/vcs/manageElements.py", "repo_name": "embrown/vcs", "src_encoding": "UTF-8", "text": "# This file aims at removing elets creation from dpeending on a Canvas, we will try to simply have\n# b = vcs.createboxfill()\n# rather than\n# x=vcs.init()\n# b=x.createboxfill()\nimport vcs\nimport boxfill\nimport meshfill\nimport isofill\nimport isoline\nimport unified1D\nimport template\nimport projection\nimport colormap\nimport fillarea\nimport marker\nimport line\nimport texttable\nimport textorientation\nimport textcombined\nimport vector\nfrom xmldocs import plot_keywords_doc, graphics_method_core, axesconvert,\\\n create_GM_input, get_GM_input, boxfill_output, \\\n isofill_output, isoline_output, yxvsx_output, xyvsy_output, xvsy_output,\\\n scatter_output, get_fillarea_doc, get_texttable_doc\nimport random\nfrom error import vcsError\nimport warnings\nimport dv3d\n\n\ndef check_name_source(name, source, typ):\n \"\"\"make sure it is a unique name for this type or generates a name for user\"\"\"\n elts = vcs.listelements(typ)\n if name is None:\n rnd = random.randint(0, 1000000000000000)\n name = '__%s_%i' % (typ, rnd)\n while name in elts:\n rnd = random.randint(0, 1000000000000000)\n name = '__%s_%i' % (typ, rnd)\n if isinstance(name, unicode):\n name = str(name)\n if not isinstance(name, str):\n raise vcsError(\n '%s object name must be a string or %s name' %\n (typ, typ))\n\n if not isinstance(source, str):\n exec(\"ok = vcs.is%s(source)\" % (typ,))\n else:\n ok = 0\n if (not isinstance(source, str)) and ok == 0:\n raise vcsError(\n 'Error %s object source must be a string or a %s object' %\n (typ, typ))\n elif ok:\n source = source.name\n\n if name in elts:\n raise vcsError(\"Error %s object named %s already exists\" % (typ, name))\n if source not in elts and typ != \"display\":\n raise vcsError(\n \"Error source %s object (%s) does not exist!\" %\n (typ, source))\n return name, source\n\n\ndef createtemplate(name=None, source='default'):\n \"\"\"\n Create a new template given the the name and the existing template to copy\n the attributes from. If no existing template name is given, then the default\n template will be used as the template to which the attributes will be copied\n from.\n\n If the name provided already exists, then an error will be returned. Template\n names must be unique.\n\n :Example:\n\n ::\n\n # create 'example1' template from 'default' template\n con=vcs.createtemplate('example1')\n # Show all the existing templates\n vcs.listelements('template')\n # create 'example2' from 'quick' template\n con=vcs.createtemplate('example2','quick')\n\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: a template or a string name of a template\n\n :returns: A template\n :rtype: vcs.template.P\n\n \"\"\"\n name, source = check_name_source(name, source, 'template')\n\n return template.P(name, source)\n\n\ndef gettemplate(Pt_name_src='default'):\n \"\"\"\n VCS contains a list of predefined templates. This function will create a\n template class object from an existing VCS template. 
If no template name\n is given, then template 'default' will be used.\n\n Note, VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createtemplate function.)\n\n :Example:\n\n ::\n\n # Show all the existing templates\n vcs.listelements('template')\n # templt instance of 'default' template\n templt=vcs.gettemplate()\n # templt2 contains 'quick' template\n templt2=vcs.gettemplate('quick')\n\n :param Pt_name_src: String name of an existing template VCS object\n :type Pt_name_src:\n\n :returns: A VCS template object\n :rtype: vcs.template.P\n \"\"\"\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Pt_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if Pt_name_src not in vcs.elements[\"template\"].keys():\n raise ValueError(\"template '%s' does not exists\" % Pt_name_src)\n return vcs.elements[\"template\"][Pt_name_src]\n\n\ndef createprojection(name=None, source='default'):\n \"\"\"\n Create a new projection method given the the name and the existing\n projection method to copy the attributes from. If no existing\n projection method name is given, then the default projection\n method will be used as the projection method to which the attributes will\n be copied from.\n\n If the name provided already exists, then an error will be returned. Projection\n method names must be unique.\n\n :Example:\n\n ::\n\n vcs.show('projection')\n p=vcs.createprojection('example1',)\n vcs.show('projection')\n p=vcs.createprojection('example2','quick')\n vcs.show('projection')\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: a projection or a string name of a projection\n\n :returns: A projection graphics method object\n :rtype: vcs.projection.Proj\n \"\"\"\n\n name, source = check_name_source(name, source, 'projection')\n return projection.Proj(name, source)\n\n\ndef getprojection(Proj_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n projection class object from an existing VCS projection method. If\n no projection name is given, then projection 'default' will be used.\n\n Note, VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createprojection function.)\n\n :Example:\n\n ::\n\n # Show all the existing projection methods\n vcs.show('projection')\n # box instance of 'default' projection method\n p=vcs.getprojection()\n # box2 instance of existing 'quick' projection graphics method\n p2=vcs.getprojection('quick')\n\n :param Proj_name_src: String name of an existing VCS projection object\n :type Proj_name_src: str\n\n :returns: A VCS projection object\n :rtype: vcs.projection.Proj\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Proj_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if Proj_name_src not in vcs.elements[\"projection\"]:\n raise vcsError(\"No such projection '%s'\" % Proj_name_src)\n return vcs.elements[\"projection\"][Proj_name_src]\n\n\ndef createboxfill(name=None, source='default'):\n \"\"\"\n\n Create a new boxfill graphics method given the the name and the existing\n boxfill graphics method to copy the attributes from. 
If no existing\n boxfill graphics method name is given, then the default boxfill graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n ::\n\n vcs.show('boxfill')\n box=vcs.createboxfill('example1',)\n vcs.show('boxfill')\n box=vcs.createboxfill('example2','quick')\n vcs.show('boxfill')\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: a boxfill or a string name of a boxfill\n\n :return: A boxfill graphics method object\n :rtype: vcs.boxfill.Gfb\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n\n name, source = check_name_source(name, source, 'boxfill')\n return boxfill.Gfb(name, source)\ncreateboxfill.__doc__ = createboxfill.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, create_GM_input, boxfill_output)\n\n\ndef getboxfill(Gfb_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n boxfill class object from an existing VCS boxfill graphics method. If\n no boxfill name is given, then boxfill 'default' will be used.\n\n Note, VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createboxfill function.)\n\n :Example:\n\n ::\n\n # Show all the existing boxfill graphics methods\n vcs.show('boxfill')\n # box instance of 'default' boxfill graphics method\n box=vcs.getboxfill()\n # box2 instance of existing 'quick' boxfill graphics method\n box2=vcs.getboxfill('quick')\n\n :param Gfb_name_src: String name of an existing boxfill VCS object\n :type Gfb_name_src: str\n\n :return: A pre-existing boxfill graphics method\n :rtype: vcs.boxfill.Gfb\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Gfb_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if Gfb_name_src not in vcs.elements[\"boxfill\"].keys():\n raise ValueError(\"The boxfill method: '%s' does not seem to exist\" % Gfb_name_src)\n return vcs.elements[\"boxfill\"][Gfb_name_src]\ngetboxfill.__doc__ = getboxfill.__doc__ % \\\n (plot_keywords_doc, graphics_method_core, axesconvert, get_GM_input, boxfill_output)\n\n\ndef createtaylordiagram(name=None, source='default'):\n \"\"\"\n Create a new taylordiagram graphics method given the name and the existing\n taylordiagram graphics method to copy the attributes from. If no existing\n taylordiagram graphics method name is given, then the default taylordiagram graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n If the name provided already exists, then an error will be returned. 
Graphics\n method names must be unique.\n\n :Example:\n\n ::\n\n vcs.show('taylordiagram')\n td=vcs.createtaylordiagram('example1',)\n vcs.show('taylordiagram')\n td=vcs.createtaylordiagram('example2','quick')\n vcs.show('taylordiagram')\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: a taylordiagram or a string name of a\n\n :returns: A taylordiagram graphics method object\n :rtype: vcs.taylor.Gtd\n \"\"\"\n\n name, source = check_name_source(name, source, 'taylordiagram')\n if name in vcs.elements[\"taylordiagram\"].keys():\n raise vcsError(\n 'Error creating taylordiagram graphic method: ' +\n name +\n ' already exist')\n if source not in vcs.elements[\"taylordiagram\"].keys():\n raise vcsError(\n 'Error creating taylordiagram graphic method ' +\n source +\n ' does not exist')\n n = vcs.taylor.Gtd(name, source)\n return n\n\n\ndef gettaylordiagram(Gtd_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n taylordiagram class object from an existing VCS taylordiagram graphics method. If\n no taylordiagram name is given, then taylordiagram 'default' will be used.\n\n Note, VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createboxfill function.)\n\n :Example:\n\n ::\n\n # Show all the existing taylordiagram graphics methods\n vcs.show('taylordiagram')\n # td instance of 'default' taylordiagram graphics method\n td=vcs.gettaylordiagram()\n # td2 instance of existing 'default' taylordiagram graphics method\n td2=vcs.gettaylordiagram('default')\n\n :param Gtd_name_src: String name of an existing taylordiagram VCS object\n :type Gtd_name_src: str\n\n :returns: A taylordiagram VCS object\n :rtype: vcs.taylor.Gtd\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Gtd_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if Gtd_name_src not in vcs.elements[\"taylordiagram\"].keys():\n raise vcsError(\n \"The taylordiagram graphic method %s does not exists\" %\n Gtd_name_src)\n else:\n return vcs.elements[\"taylordiagram\"][Gtd_name_src]\n\n\ndef createmeshfill(name=None, source='default'):\n \"\"\"\n Create a new meshfill graphics method given the the name and the existing\n meshfill graphics method to copy the attributes from. If no existing\n meshfill graphics method name is given, then the default meshfill graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n ::\n\n vcs.show('meshfill')\n mesh=vcs.createmeshfill('example1')\n vcs.show('meshfill')\n mesh=vcs.createmeshfill('example2','quick')\n vcs.show('meshfill')\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: a meshfill or a string name of a meshfill\n\n :returns: A meshfill graphics method object\n :rtype: vcs.meshfill.Gfm\n \"\"\"\n name, source = check_name_source(name, source, 'meshfill')\n return meshfill.Gfm(name, source)\n\n\ndef getmeshfill(Gfm_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n meshfill class object from an existing VCS meshfill graphics method. 
If\n no meshfill name is given, then meshfill 'default' will be used.\n\n .. note::\n\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createmeshfill function.)\n\n :Example:\n\n ::\n\n a=vcs.init()\n # Show all the existing meshfill graphics methods\n a.show('meshfill')\n # mesh instance of 'default' meshfill graphics method\n mesh=a.getmeshfill()\n # mesh2 instance of existing 'quick' meshfill graphics method\n mesh2=a.getmeshfill('quick')\n\n :param Gfm_name_src: String name of an existing meshfill VCS object\n :type Gfm_name_src: str\n\n :returns: A meshfill VCS object\n :rtype: vcs.meshfill.Gfm\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Gfm_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if Gfm_name_src not in vcs.elements[\"meshfill\"]:\n raise ValueError(\"meshfill '%s' does not exists\" % Gfm_name_src)\n\n return vcs.elements[\"meshfill\"][Gfm_name_src]\n\n\ndef createisofill(name=None, source='default'):\n \"\"\"\n\n Create a new isofill graphics method given the the name and the existing\n isofill graphics method to copy the attributes from. If no existing\n isofill graphics method name is given, then the default isofill graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n ::\n\n vcs.show('isofill')\n iso=vcs.createisofill('example1')\n vcs.show('isofill')\n iso=vcs.createisofill('example2','quick')\n vcs.show('isofill')\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: an isofill object, or string name of an isofill object\n\n :returns: An isofill graphics method\n :rtype: vcs.isofill.Gfi\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n\n name, source = check_name_source(name, source, 'isofill')\n return isofill.Gfi(name, source)\ncreateisofill.__doc__ = createisofill.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, create_GM_input, isofill_output)\n\n\ndef getisofill(Gfi_name_src='default'):\n \"\"\"\n\n VCS contains a list of graphics methods. This function will create a\n isofill class object from an existing VCS isofill graphics method. If\n no isofill name is given, then isofill 'default' will be used.\n\n .. note::\n\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. 
(See the createisofill function.)\n\n :Example:\n\n ::\n\n # Show all the existing isofill graphics methods\n vcs.show('isofill')\n # iso instance of 'default' isofill graphics method\n iso=vcs.getisofill()\n # iso2 instance of existing 'quick' isofill graphics method\n iso2=vcs.getisofill('quick')\n\n :param Gfi_name_src: String name of an existing isofill VCS object\n :type Gfi_name_src: str\n\n :returns: The specified isofill VCS object\n :rtype: vcs.isofill.Gfi\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Gfi_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if Gfi_name_src not in vcs.elements[\"isofill\"]:\n raise ValueError(\"The isofill '%s' does not exists\" % Gfi_name_src)\n return vcs.elements[\"isofill\"][Gfi_name_src]\ngetisofill.__doc__ = getisofill.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, get_GM_input, isofill_output)\n\n\ndef createisoline(name=None, source='default'):\n \"\"\"\n\n Create a new isoline graphics method given the the name and the existing\n isoline graphics method to copy the attributes from. If no existing\n isoline graphics method name is given, then the default isoline graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n ::\n\n vcs.show('isoline')\n iso=vcs.createisoline('example1')\n vcs.show('isoline')\n iso=vcs.createisoline('example2','quick')\n vcs.show('isoline')\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: an isoline object, or string name of an isoline object\n\n :returns: An isoline graphics method object\n :rtype: vcs.isoline.Gi\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n\n name, source = check_name_source(name, source, 'isoline')\n return isoline.Gi(name, source)\ncreateisoline.__doc__ = createisoline.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, create_GM_input, isoline_output)\n\n\ndef getisoline(Gi_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n isoline class object from an existing VCS isoline graphics method. If\n no isoline name is given, then isoline 'default' will be used.\n\n .. note::\n\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. 
(See the createisoline function.)\n\n :Example:\n\n ::\n\n # Show all the existing isoline graphics methods\n vcs.show('isoline')\n # iso instance of 'default' isoline graphics method\n iso=vcs.getisoline()\n # iso2 instance of existing 'quick' isoline graphics method\n iso2=vcs.getisoline('quick')\n gm.linewidth=0\n\n :param Gi_name_src: String name of an existing isoline VCS object\n :type Gi_name_src: str\n\n :returns: The requested isoline VCS object\n :rtype: vcs.isoline.Gi\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Gi_name_src, str):\n raise vcsError('The argument must be a string.')\n if Gi_name_src not in vcs.elements[\"isoline\"]:\n raise ValueError(\"The isoline '%s' does not exists\" % Gi_name_src)\n return vcs.elements[\"isoline\"][Gi_name_src]\ngetisoline.__doc__ = getisoline.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, get_GM_input, isoline_output)\n\n\ndef create1d(name=None, source='default'):\n name, source = check_name_source(name, source, '1d')\n return unified1D.G1d(name, source)\n\n\ndef get1d(name):\n # Check to make sure the argument passed in is a STRING\n if not isinstance(name, str):\n raise vcsError('The argument must be a string.')\n\n if name not in vcs.elements[\"1d\"]:\n raise ValueError(\"The 1d '%s' graphics method does not exists\" % name)\n return vcs.elements[\"1d\"][name]\n\n\ndef createxyvsy(name=None, source='default'):\n \"\"\"\n Create a new Xyvsy graphics method given the the name and the existing\n Xyvsy graphics method to copy the attributes from. If no existing\n Xyvsy graphics method name is given, then the default Xyvsy graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n ::\n\n a=vcs.init()\n vcs.show('xyvsy')\n xyy=vcs.createxyvsy('example1',)\n vcs.show('xyvsy')\n xyy=vcs.createxyvsy('example2','quick')\n vcs.show('xyvsy')\n\n\n :param name: The name of the created object\n :type name: str\n\n\n :param source: The object to inherit from\n :type source: a xyvsy or a string name of a xyvsy\n\n :returns: A XYvsY graphics method object\n :rtype: vcs.unified1D.G1d\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n try:\n gm = vcs.create1d(name, source)\n except vcsError as ve:\n if ve.message == \"Error 1d object named %s already exists\" % name:\n warning_message = \"A 1D graphics method named %s already exists, creating yours as %s\" % (name,\n name + \"_xyvsy\")\n warnings.warn(warning_message)\n gm = vcs.create1d(name + \"_xyvsy\", source)\n else:\n raise ve\n gm.flip = True\n return gm\ncreatexyvsy.__doc__ = createxyvsy.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, create_GM_input, xyvsy_output)\n\n\ndef getxyvsy(GXy_name_src='default'):\n \"\"\"\n\n VCS contains a list of graphics methods. This function will create a\n Xyvsy class object from an existing VCS Xyvsy graphics method. If\n no Xyvsy name is given, then Xyvsy 'default' will be used.\n\n .. note::\n\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. 
(See the createxyvsy function.)\n\n :Example:\n\n ::\n\n a=vcs.init()\n # Show all the existing Xyvsy graphics methods\n vcs.show('xyvsy')\n # xyy instance of 'default' Xyvsy graphics method\n xyy=vcs.getxyvsy('default_xyvsy_')\n # xyy2 instance of existing 'quick' Xyvsy graphics method\n xyy2=vcs.getxyvsy('quick')\n\n :param GXy_name_src: String name of an existing Xyvsy graphics method\n :type GXy_name_src: str\n\n :returns: An XYvsY graphics method object\n :rtype: vcs.unified1D.G1d\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n gm = vcs.get1d(GXy_name_src)\n if gm.g_type != \"xyvsy\":\n # Already existed when name_src was created, most likely\n return vcs.get1d(GXy_name_src + \"_xyvsy\")\n return gm\ngetxyvsy.__doc__ = getxyvsy.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, get_GM_input, xyvsy_output)\n\n\ndef createyxvsx(name=None, source='default'):\n \"\"\"\n Create a new Yxvsx graphics method given the the name and the existing\n Yxvsx graphics method to copy the attributes from. If no existing\n Yxvsx graphics method name is given, then the default Yxvsx graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n ::\n\n a=vcs.init()\n vcs.show('yxvsx')\n yxx=vcs.createyxvsx('example1',)\n vcs.show('yxvsx')\n yxx=vcs.createyxvsx('example2','quick')\n vcs.show('yxvsx')\n\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: a yxvsy or a string name of a yxvsy\n\n :returns: A YXvsX graphics method object\n :rtype: vcs.unified1D.G1d\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n try:\n gm = vcs.create1d(name, source)\n except vcsError as ve:\n if ve.message == \"Error 1d object named %s already exists\" % name:\n warning_message = \"A 1D graphics method named %s already exists, creating yours as %s\" % (name,\n name + \"_yxvsx\")\n warnings.warn(warning_message)\n gm = vcs.create1d(name + \"_yxvsx\", source)\n else:\n raise ve\n return gm\ncreateyxvsx.__doc__ = createyxvsx.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, create_GM_input, yxvsx_output)\n\n\ndef getyxvsx(GYx_name_src='default'):\n \"\"\"\n\n VCS contains a list of graphics methods. This function will create a\n Yxvsx class object from an existing VCS Yxvsx graphics method. If\n no Yxvsx name is given, then Yxvsx 'default' will be used.\n\n .. note::\n\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. 
(See the createyxvsx function.)\n\n :Example:\n\n ::\n\n a=vcs.init()\n # Show all the existing Yxvsx graphics methods\n vcs.show('yxvsx')\n # yxx instance of 'default' Yxvsx graphics method\n yxx=vcs.getyxvsx()\n # yxx2 instance of existing 'quick' Yxvsx graphics method\n yxx2=vcs.getyxvsx('quick')\n\n :param GYx_name_src: String name of an existing Yxvsx graphics method\n :type GYx_name_src: str\n\n :return: A Yxvsx graphics method object\n :rtype: vcs.unified1D.G1d\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n gm = vcs.get1d(GYx_name_src)\n if gm.g_type != \"yxvsx\":\n return vcs.get1d(GYx_name_src + \"_yxvsx\")\n return gm\ngetyxvsx.__doc__ = getyxvsx.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, get_GM_input, yxvsx_output)\n\n\ndef createxvsy(name=None, source='default'):\n \"\"\"\n Create a new XvsY graphics method given the the name and the existing\n XvsY graphics method to copy the attributes from. If no existing\n XvsY graphics method name is given, then the default XvsY graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n ::\n\n a=vcs.init()\n vcs.show('xvsy')\n xy=vcs.createxvsy('example1',)\n vcs.show('xvsy')\n xy=vcs.createxvsy('example2','quick')\n vcs.show('xvsy')\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: a xvsy or a string name of a xvsy\n\n :returns: A XvsY graphics method object\n :rtype: vcs.unified1D.G1d\n\n %s\n %s\n %s\n %s\n %s\n \"\"\"\n try:\n gm = vcs.create1d(name, source)\n except vcsError as ve:\n if ve.message == \"Error 1d object named %s already exists\" % name:\n warning_message = \"A 1D graphics method named %s already exists, creating yours as %s\" % (name,\n name + \"_xvsy\")\n warnings.warn(warning_message)\n gm = vcs.create1d(name + \"_xvsy\", source)\n else:\n raise ve\n return gm\ncreatexvsy.__doc__ = createxvsy.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, create_GM_input, xvsy_output)\n\n\ndef getxvsy(GXY_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n XvsY class object from an existing VCS XvsY graphics method. If\n no XvsY name is given, then XvsY 'default' will be used.\n\n .. note::\n\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createxvsy function.)\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all the existing XvsY graphics methods\n vcs.show('xvsy')\n # xy instance of 'default' XvsY graphics method\n xy=vcs.getxvsy()\n # xy2 instance of existing 'quick' XvsY graphics method\n xy2=vcs.getxvsy('quick')\n\n:param GXY_name_src: String name of a 1d graphics method\n:type GXY_name_src: str\n\n:returns: A XvsY graphics method object\n:rtype: vcs.unified1D.G1d\n%s\n%s\n%s\n%s\n%s\n\"\"\"\n gm = vcs.get1d(GXY_name_src)\n # Deliberately yxvsx here; xvsy is just an alias\n if gm.g_type != \"yxvsx\":\n return vcs.get1d(GXY_name_src + \"_xvsy\")\n return gm\ngetxvsy.__doc__ = getxvsy.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, get_GM_input, xvsy_output)\n\n\ndef createvector(name=None, source='default'):\n \"\"\"\n Create a new vector graphics method given the the name and the existing\n vector graphics method to copy the attributes from. 
If no existing\n vector graphics method name is given, then the default vector graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n::\n\n a=vcs.init()\n vcs.show('vector')\n vec=vcs.createvector('example1',)\n vcs.show('vector')\n vec=vcs.createvector('example2','quick')\n vcs.show('vector')\n\n\n:param name: The name of the created object\n:type name: str\n\n:param source: The object to inherit from\n:type source: a vector or a string name of a vector\n\n:returns: A vector graphics method object\n:rtype: vcs.vector.Gv\n\n \"\"\"\n name, source = check_name_source(name, source, 'vector')\n return vector.Gv(name, source)\n\n\ndef getvector(Gv_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n vector class object from an existing VCS vector graphics method. If\n no vector name is given, then vector 'default' will be used.\n\n .. note::\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createvector function.)\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all the existing vector graphics methods\n vcs.show('vector')\n # vec instance of 'default' vector graphics method\n vec=vcs.getvector()\n # vec2 instance of existing 'quick' vector graphics method\n vec2=vcs.getvector('quick')\n\n:param Gv_name_src: String name of an existing vector VCS object\n:type Gv_name_src: str\n\n:returns: A vector graphics method object\n:rtype: vcs.vector.Gv\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Gv_name_src, str):\n raise vcsError('The argument must be a string.')\n if Gv_name_src not in vcs.elements[\"vector\"]:\n raise ValueError(\"The vector '%s' does not exist\" % Gv_name_src)\n return vcs.elements[\"vector\"][Gv_name_src]\n\n\ndef createscatter(name=None, source='default'):\n \"\"\"\n\n Create a new scatter graphics method given the the name and the existing\n scatter graphics method to copy the attributes from. If no existing\n scatter graphics method name is given, then the default scatter graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n\n .. note::\n If the name provided already exists, then an error will be returned. 
Graphics\n method names must be unique.\n\n :Example:\n\n::\n\n a=vcs.init()\n vcs.show('scatter')\n sct=vcs.createscatter('example1',)\n vcs.show('scatter')\n sct=vcs.createscatter('example2','quick')\n vcs.show('scatter')\n\n:param name: The name of the created object\n:type name: str\n\n:param source: The object to inherit from\n:type source: a scatter or a string name of a scatter\n\n:return: A scatter graphics method\n:rtype: vcs.unified1D.G1d\n\n%s\n%s\n%s\n%s\n%s\n\"\"\"\n try:\n gm = vcs.create1d(name, source)\n except vcsError as ve:\n if ve.message == \"Error 1d object named %s already exists\" % name:\n warning_message = \"A 1D graphics method named %s already exists, creating yours as %s\" % (name,\n name + \"_scatter\")\n warnings.warn(warning_message)\n gm = vcs.create1d(name + \"_scatter\", source)\n else:\n raise ve\n gm.linewidth = 0\n return gm\ncreatescatter.__doc__ = createscatter.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, create_GM_input, scatter_output)\n\n\ndef getscatter(GSp_name_src='default'):\n \"\"\"\n\n VCS contains a list of graphics methods. This function will create a\n scatter class object from an existing VCS scatter graphics method. If\n no scatter name is given, then scatter 'default' will be used.\n\n .. note::\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createscatter function.)\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all the existing scatter graphics methods\n vcs.show('scatter')\n # sct instance of 'default' scatter graphics method\n sct=vcs.getscatter('default_scatter_')\n # sct2 instance of existing 'quick' scatter graphics method\n sct2=vcs.getscatter('quick')\n\n:param GSp_name_src: String name of an existing scatter VCS object.\n:type GSp_name_src: str\n\n:returns: A scatter graphics method object\n:rtype: vcs.unified1D.G1d\n\n%s\n%s\n%s\n%s\n%s\n\"\"\"\n gm = vcs.get1d(GSp_name_src)\n if gm.g_type != \"scatter\":\n return vcs.get1d(GSp_name_src + \"_scatter\")\n return gm\ngetscatter.__doc__ = getscatter.__doc__ % (\n plot_keywords_doc, graphics_method_core, axesconvert, get_GM_input, scatter_output)\n\n\ndef createline(name=None, source='default', ltype=None,\n width=None, color=None, priority=None,\n viewport=None, worldcoordinate=None,\n x=None, y=None, projection=None):\n \"\"\"\n Create a new line secondary method given the the name and the existing\n line secondary method to copy the attributes from. If no existing line\n secondary method name is given, then the default line secondary method\n will be used as the secondary method to which the attributes will be\n copied from.\n\n .. 
note::\n If the name provided already exists, then an error will be returned.\n Secondary method names must be unique.\n\n :Example:\n\n::\n\n a=vcs.init()\n vcs.show('line')\n ln=vcs.createline('example1')\n vcs.show('line')\n ln=vcs.createline('example2','black')\n vcs.show('line')\n # Create instance of line object 'red'\n ln2=vcs.createline(name='new', name_src='red',ltype='dash', width=2,\n color=242, priority=1, viewport=[0, 2.0, 0, 2.0],\n worldcoordinate=[0,100, 0,50]\n x=[0,20,40,60,80,100],\n y=[0,10,20,30,40,50] )\n # Plot using specified line object\n vcs.line(ln2)\n\n:param name: Name of created object\n:type name: str\n\n:param source: a line, or string name of a line\n:type source: str\n\n:param ltype: One of \"dash\", \"dash-dot\", \"solid\", \"dot\", or \"long-dash\".\n:type ltype: str\n\n:param width: Thickness of the line to be created\n:type width: int\n\n:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\n or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n:type color: str or int\n\n:param priority: The layer on which the line will be drawn.\n:type priority: int\n\n:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas\n:type viewport: list of floats\n\n:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n:type worldcoordinate: list of floats\n\n:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].\n:type x: list of floats\n\n:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n:type y: list of floats\n\n:param projection: Specify a geographic projection used to convert x/y from spherical coordinates into 2D coordinates.\n:type projection: str or projection object\n\n:returns: A VCS line secondary method object\n:rtype: vcs.line.Tl\n\n\"\"\"\n\n name, source = check_name_source(name, source, 'line')\n\n ln = line.Tl(name, source)\n if (ltype is not None):\n ln.type = ltype\n if (width is not None):\n ln.width = width\n if (color is not None):\n ln.color = color\n if (priority is not None):\n ln.priority = priority\n if (viewport is not None):\n ln.viewport = viewport\n if (worldcoordinate is not None):\n ln.worldcoordinate = worldcoordinate\n if (x is not None):\n ln.x = x\n if (y is not None):\n ln.y = y\n if (projection is not None):\n ln.projection = projection\n return ln\n\n\ndef setLineAttributes(to, l):\n '''\n Set attributes linecolor, linewidth and linetype from line l.\n l can be a line name defined in vcs.elements or a line object\n '''\n import queries\n line = None\n if (queries.isline(l)):\n line = l\n elif l in vcs.elements[\"line\"]:\n line = vcs.elements[\"line\"][l]\n else:\n raise ValueError(\"Expecting a line object or a \" +\n \"line name defined in vcs.elements, got type \" +\n type(l).__name__)\n to.linecolor = line.color[0]\n to.linewidth = line.width[0]\n to.linetype = line.type[0]\n\n\ndef getline(name='default', ltype=None, width=None, color=None,\n priority=None, viewport=None,\n worldcoordinate=None,\n x=None, y=None):\n \"\"\"\n VCS contains a list of secondary methods. This function will create a\n line class object from an existing VCS line secondary method. If\n no line name is given, then line 'default' will be used.\n\n .. 
note::\n VCS does not allow the modification of `default' attribute sets.\n However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createline function.)\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all the existing line secondary methods\n vcs.show('line')\n # ln instance of 'default' line secondary method\n ln=vcs.getline()\n # ln2 instance of existing 'quick' line secondary method\n ln2=vcs.getline('quick')\n # Create instance of line object 'red'\n ln3=vcs.getline(name='red', ltype='dash', width=2,\n color=242, priority=1, viewport=[0, 2.0, 0, 2.0],\n worldcoordinate=[0,100, 0,50],\n x=[0,20,40,60,80,100],\n y=[0,10,20,30,40,50] )\n # Plot using specified line object\n vcs.line(ln3)\n\n:param name: Name of created object\n:type name: str\n\n:param ltype: One of \"dash\", \"dash-dot\", \"solid\", \"dot\", or \"long-dash\".\n:type ltype: str\n\n:param width: Thickness of the line to be created\n:type width: int\n\n:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\n or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n:type color: str or int\n\n:param priority: The layer on which the line will be drawn.\n:type priority: int\n\n:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas\n:type viewport: list of floats\n\n:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n:type worldcoordinate: list of floats\n\n:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].\n:type x: list of floats\n\n:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n:type y: list of floats\n\n:returns: A VCS line object\n:rtype: vcs.line.Tl\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(name, str):\n raise vcsError('The argument must be a string.')\n\n if name not in vcs.elements[\"line\"]:\n raise ValueError(\"The line '%s' does not exist\" % name)\n ln = vcs.elements[\"line\"][name]\n if ltype is not None and ln.name != 'default':\n ln.type = ltype\n if width is not None and ln.name != 'default':\n ln.width = width\n if color is not None and ln.name != 'default':\n ln.color = color\n if priority is not None and ln.name != 'default':\n ln.priority = priority\n if viewport is not None and ln.name != 'default':\n ln.viewport = viewport\n if worldcoordinate is not None and ln.name != 'default':\n ln.worldcoordinate = worldcoordinate\n if x is not None and ln.name != 'default':\n ln.x = x\n if y is not None and ln.name != 'default':\n ln.y = y\n return ln\n\n\ndef createmarker(name=None, source='default', mtype=None,\n size=None, color=None, priority=None,\n viewport=None, worldcoordinate=None,\n x=None, y=None, projection=None):\n \"\"\"\n Create a new marker secondary method given the name and the existing\n marker secondary method to copy the attributes from. If no existing marker\n secondary method name is given, then the default marker secondary method\n will be used as the secondary method to which the attributes will be\n copied from.\n\n .. 
note::\n If the name provided already exists, then an error will be returned.\n Secondary method names must be unique.\n\n :Example:\n\n::\n\n a=vcs.init()\n vcs.show('marker')\n mrk=vcs.createmarker('example1',)\n vcs.show('marker')\n mrk=vcs.createmarker('example2','black')\n vcs.show('boxfill')\n # Create instance of marker object 'red'\n mrk2=vcs.createmarker(name='new', name_src='red',mtype='dot', size=2,\n color=242, priority=1, viewport=[0, 2.0, 0, 2.0],\n worldcoordinate=[0,100, 0,50]\n x=[0,20,40,60,80,100],\n y=[0,10,20,30,40,50] )\n # Plot using specified marker object\n vcs.marker(mrk2)\n\n\n:param name: Name of created object\n:type name: str\n\n:param source: A marker, or string name of a marker\n:type source: str\n\n:param mtype: Specifies the type of marker, i.e. \"dot\", \"circle\"\n:type mtype: str\n\n:param size:\n:type size: int\n\n:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\n or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n:type color: str or int\n\n:param priority: The layer on which the marker will be drawn.\n:type priority: int\n\n:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas\n:type viewport: list of floats\n\n:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n:type worldcoordinate: list of floats\n\n:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].\n:type x: list of floats\n\n:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n:type y: list of floats\n\n:returns: A secondary marker method\n:rtype: vcs.marker.Tm\n\n \"\"\"\n\n name, source = check_name_source(name, source, 'marker')\n\n mrk = marker.Tm(name, source)\n if (mtype is not None):\n mrk.type = mtype\n if (size is not None):\n mrk.size = size\n if (color is not None):\n mrk.color = color\n if (priority is not None):\n mrk.priority = priority\n if (viewport is not None):\n mrk.viewport = viewport\n if (worldcoordinate is not None):\n mrk.worldcoordinate = worldcoordinate\n if (x is not None):\n mrk.x = x\n if (y is not None):\n mrk.y = y\n if (projection is not None):\n mrk.projection = projection\n return mrk\n\n\ndef getmarker(name='default', mtype=None, size=None, color=None,\n priority=None, viewport=None,\n worldcoordinate=None,\n x=None, y=None):\n \"\"\"\n VCS contains a list of secondary methods. This function will create a\n marker class object from an existing VCS marker secondary method. If\n no marker name is given, then marker 'default' will be used.\n\n .. note::\n VCS does not allow the modification of `default' attribute sets.\n However, a `default' attribute set that has been copied under a\n different name can be modified. 
(See the createmarker function.)\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all the existing marker secondary methods\n vcs.show('marker')\n # mrk instance of 'default' marker secondary method\n mrk=vcs.getmarker()\n # mrk2 instance of existing 'quick' marker secondary method\n mrk2=vcs.getmarker('quick')\n # Create instance of marker object 'red'\n mrk3=vcs.getmarker(name='red', mtype='dash', size=2,\n color=242, priority=1, viewport=[0, 2.0, 0, 2.0],\n worldcoordinate=[0,100, 0,50],\n x=[0,20,40,60,80,100],\n y=[0,10,20,30,40,50] )\n # Plot using specified marker object\n vcs.marker(mrk3)\n\n:param name: Name of created object\n:type name: str\n\n:param source: A marker, or string name of a marker\n:type source: str\n\n:param mtype: Specifies the type of marker, i.e. \"dot\", \"circle\"\n:type mtype: str\n\n:param size: Size of the marker\n:type size: int\n\n:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\n or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n:type color: str or int\n\n:param priority: The layer on which the marker will be drawn.\n:type priority: int\n\n:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas\n:type viewport: list of floats\n\n:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n:type worldcoordinate: list of floats\n\n:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].\n:type x: list of floats\n\n:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n:type y: list of floats\n\n:returns: A marker graphics method object\n:rtype: vcs.marker.Tm\n\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(name, str):\n raise vcsError('The argument must be a string.')\n\n if name not in vcs.elements[\"marker\"]:\n raise ValueError(\"The marker object '%s' does not exist\" % name)\n mrk = vcs.elements[\"marker\"][name]\n if (mtype is not None) and (mrk.name != \"default\"):\n mrk.type = mtype\n if (size is not None) and (mrk.name != \"default\"):\n mrk.size = size\n if (color is not None) and (mrk.name != \"default\"):\n mrk.color = color\n if (priority is not None) and (mrk.name != \"default\"):\n mrk.priority = priority\n if (viewport is not None) and (mrk.name != \"default\"):\n mrk.viewport = viewport\n if (worldcoordinate is not None) and (mrk.name != \"default\"):\n mrk.worldcoordinate = worldcoordinate\n if (x is not None) and (mrk.name != \"default\"):\n mrk.x = x\n if (y is not None) and (mrk.name != \"default\"):\n mrk.y = y\n return mrk\n\n\ndef createfillarea(name=None, source='default', style=None,\n index=None, color=None, priority=None,\n viewport=None, worldcoordinate=None,\n x=None, y=None):\n \"\"\"\n Create a new fillarea secondary method given the name and the existing\n fillarea secondary method to copy the attributes from. If no existing fillarea\n secondary method name is given, then the default fillarea secondary method\n will be used as the secondary method to which the attributes will be\n copied from.\n\n .. 
note::\n If the name provided already exists, then an error will be returned.\n Secondary method names must be unique.\n\n :Example:\n\n::\n\n vcs.show('fillarea')\n fa=vcs.createfillarea('example1',)\n vcs.show('fillarea')\n fa=vcs.createfillarea('example2','black')\n vcs.show('fillarea')\n # Create instance of fill area object 'red'\n fa2=vcs.createmarker(name='new', name_src='red',style=1, index=1,\n color=242, priority=1, viewport=[0, 2.0, 0, 2.0],\n worldcoordinate=[0,100, 0,50]\n x=[0,20,40,60,80,100],\n y=[0,10,20,30,40,50] )\n # Plot using specified fill area object\n vcs.fillarea(fa2)\n\n:param name: Name of created object\n:type name: str\n\n:param source: a fillarea, or string name of a fillarea\n:type source: str\n\n:param style: One of \"hatch\", \"solid\", or \"pattern\".\n:type style: str\n\n:param index: Specifies which `pattern <http://uvcdat.llnl.gov/gallery/fullsize/pattern_chart.png>`_ to fill with.\nAccepts ints from 1-20.\n\n:type index: int\n\n:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\nor an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n\n:type color: str or int\n\n:param priority: The layer on which the fillarea will be drawn.\n:type priority: int\n\n:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas\n:type viewport: list of floats\n\n:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n:type worldcoordinate: list of floats\n\n:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].\n:type x: list of floats\n\n:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n:type y: list of floats\n\n:returns: A fillarea object\n:rtype: vcs.fillarea.Tf\n\"\"\"\n\n name, source = check_name_source(name, source, 'fillarea')\n\n fa = fillarea.Tf(name, source)\n if (style is not None):\n fa.style = style\n if (index is not None):\n fa.index = index\n if (color is not None):\n fa.color = color\n if (priority is not None):\n fa.priority = priority\n if (viewport is not None):\n fa.viewport = viewport\n if (worldcoordinate is not None):\n fa.worldcoordinate = worldcoordinate\n if (x is not None):\n fa.x = x\n if (y is not None):\n fa.y = y\n return fa\n\n\ndef getfillarea(name='default', style=None,\n index=None, color=None,\n priority=None, viewport=None,\n worldcoordinate=None,\n x=None, y=None):\n \"\"\"\n %s\n\n:param name: String name of an existing fillarea VCS object\n:type name: str\n\n:param style: One of \"hatch\", \"solid\", or \"pattern\".\n:type style: str\n\n:param index: Specifies which `pattern <http://uvcdat.llnl.gov/gallery/fullsize/pattern_chart.png>`_ to fill with.\n Accepts ints from 1-20.\n:type index: int\n\n:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\n or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n:type color: str or int\n\n:param priority: The layer on which the texttable will be drawn.\n:type priority: int\n\n:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas\n:type viewport: list of floats\n\n:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n:type worldcoordinate: list of floats\n\n:param x: List of lists of x coordinates. 
Values must be between worldcoordinate[0] and worldcoordinate[1].\n:type x: list of floats\n\n:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n:type y: list of floats\n\n:returns: A fillarea secondary object\n:rtype: vcs.fillarea.Tf\n\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(name, str):\n raise vcsError('The argument must be a string.')\n if name not in vcs.elements[\"fillarea\"].keys():\n raise vcsError(\"Fillarea '%s' does not exist\" % (name))\n\n fa = vcs.elements[\"fillarea\"][name]\n if (style is not None) and (fa.name != \"default\"):\n fa.style = style\n if (index is not None) and (fa.name != \"default\"):\n fa.index = index\n if (color is not None) and (fa.name != \"default\"):\n fa.color = color\n if (priority is not None) and (fa.name != \"default\"):\n fa.priority = priority\n if (viewport is not None) and (fa.name != \"default\"):\n fa.viewport = viewport\n if (worldcoordinate is not None) and (fa.name != \"default\"):\n fa.worldcoordinate = worldcoordinate\n if (x is not None) and (fa.name != \"default\"):\n fa.x = x\n if (y is not None) and (fa.name != \"default\"):\n fa.y = y\n return fa\ngetfillarea.__doc__ = getfillarea.__doc__ % get_fillarea_doc\n\n\ndef createtexttable(name=None, source='default', font=None,\n spacing=None, expansion=None, color=None, priority=None,\n viewport=None, worldcoordinate=None,\n x=None, y=None):\n \"\"\"\n Create a new texttable secondary method given the the name and the existing\n texttable secondary method to copy the attributes from. If no existing texttable\n secondary method name is given, then the default texttable secondary method\n will be used as the secondary method to which the attributes will be\n copied from.\n\n .. note::\n If the name provided already exists, then an error will be returned.\n Secondary method names must be unique.\n\n .. deprecated:: 1.0\n expansion parameter is no longer used\n\n :Example:\n\n::\n\n # Show names of all available texttable objects\n vcs.show('texttable')\n tt=vcs.createtexttable('example1',)\n vcs.show('texttable')\n tt=vcs.createtexttable('example2','black')\n vcs.show('texttable')\n # Show available fonts\n vcs.show('font')\n # Create instance of texttable object 'new'\n tt=vcs.createtexttable(name='new', source='red',font=1,spacing=1, font='default',\n color=242, priority=1, viewport=[0, 1.0, 0, 1.0],\n worldcoordinate=[0,100, 0,50]\n x=[0,20,40,60,80,100],\n y=[0,10,20,30,40,50] )\n # Plot using specified texttable object\n vcs.texttable(tt)\n\n\n:param name: Name of created object\n:type name: str\n\n:param source: a texttable, or string name of a texttable\n:type source: str\n\n:param font: Which font to use (index or name).\n:type font: int or string\n\n:param expansion: DEPRECATED\n:type expansion: DEPRECATED\n\n:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\n or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n:type color: str or int\n\n:param priority: The layer on which the texttable will be drawn.\n:type priority: int\n\n:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas\n:type viewport: list of floats\n\n:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n:type worldcoordinate: list of floats\n\n:param x: List of lists of x coordinates. 
Values must be between worldcoordinate[0] and worldcoordinate[1].\n:type x: list of floats\n\n:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n:type y: list of floats\n\n:returns: A texttable graphics method object\n:rtype: vcs.texttable.Tt\n\n\"\"\"\n\n name, source = check_name_source(name, source, 'texttable')\n\n tt = texttable.Tt(name, source)\n try:\n if (font is not None):\n tt.font = font\n if (spacing is not None):\n tt.spacing = spacing\n if (expansion is not None):\n tt.expansion = expansion\n if (color is not None):\n tt.color = color\n if (priority is not None):\n tt.priority = priority\n if (viewport is not None):\n tt.viewport = viewport\n if (worldcoordinate is not None):\n tt.worldcoordinate = worldcoordinate\n if (x is not None):\n tt.x = x\n if (y is not None):\n tt.y = y\n return tt\n except:\n pass\n\n\ndef gettexttable(name='default', font=None,\n spacing=None, expansion=None, color=None,\n priority=None, viewport=None,\n worldcoordinate=None,\n x=None, y=None):\n \"\"\"\n %s\n\n :param name: String name of an existing VCS texttable object\n :type name: str\n\n :param font: ???\n :type font: ???\n\n :param expansion: ???\n :type expansion: ???\n\n :param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\n or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n :type color: str or int\n\n :param priority: The layer on which the texttable will be drawn.\n :type priority: int\n\n :param viewport: 4 floats between 0 and 1.\n These specify the area that the X/Y values are mapped to inside of the canvas\n :type viewport: list of floats\n\n :param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n :type worldcoordinate: list of floats\n\n :param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].\n :type x: list of floats\n\n :param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n :type y: list of floats\n\n :returns: A texttable graphics method object\n :rtype: vcs.texttable.Tt\n \"\"\" % get_texttable_doc\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(name, str):\n raise vcsError('The argument must be a string.')\n\n if name not in vcs.elements[\"texttable\"]:\n raise ValueError(\"The texttable '%s' does not exists\" % name)\n return vcs.elements[\"texttable\"][name]\n\n\ndef createtextorientation(name=None, source='default'):\n \"\"\"\n Create a new textorientation secondary method given the the name and\n the existing textorientation secondary method to copy the attributes\n from. If no existing textorientation secondary method name is given,\n then the default textorientation secondary method will be used as the\n secondary method to which the attributes will be copied from.\n\n .. 
note::\n If the name provided already exists, then an error will be returned.\n Secondary method names must be unique.\n\n :Example:\n\n::\n\n vcs.show('textorientation')\n to=vcs.createtextorientation('example1',)\n vcs.show('textorientation')\n to=vcs.createtextorientation('example2','black')\n vcs.show('textorientation')\n\n\n:param name: The name of the created object\n:type name: str\n\n:param source: The object to inherit from\n:type source: a textorientation or a string name of a textorientation\n\n:returns: A textorientation secondary method\n:rtype: vcs.textorientation.To\n\"\"\"\n\n name, source = check_name_source(name, source, 'textorientation')\n\n return textorientation.To(name, source)\n\n\ndef gettextorientation(To_name_src='default'):\n \"\"\"\n VCS contains a list of secondary methods. This function will create\n a textorientation class object from an existing VCS textorientation\n secondary method. If no textorientation name is given, then\n textorientation 'default' will be used.\n\n .. note::\n VCS does not allow the modification of `default' attribute sets.\n However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createtextorientation function.)\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all the existing textorientation secondary methods\n vcs.show('textorientation')\n # to instance of 'default' textorientation secondary method\n to=vcs.gettextorientation()\n # to2 instance of existing 'quick' textorientation secondary method\n to2=vcs.gettextorientation('quick')\n\n:param To_name_src: String name of an existing textorientation VCS object\n:type To_name_src: str\n\n:returns: A textorientation VCS object\n:rtype: vcs.textorientation.To\n \"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(To_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if To_name_src not in vcs.elements[\"textorientation\"]:\n raise ValueError(\n \"The textorientation '%s' does not exists\" %\n To_name_src)\n return vcs.elements[\"textorientation\"][To_name_src]\n\n\ndef createtextcombined(Tt_name=None, Tt_source='default', To_name=None, To_source='default',\n font=None, spacing=None, expansion=None, color=None,\n priority=None, viewport=None, worldcoordinate=None, x=None, y=None,\n height=None, angle=None, path=None, halign=None, valign=None, projection=None):\n \"\"\"\n Create a new textcombined secondary method given the the names and\n the existing texttable and textorientation secondary methods to copy\n the attributes from. If no existing texttable and textorientation\n secondary method names are given, then the default texttable and\n textorientation secondary methods will be used as the secondary method\n to which the attributes will be copied from.\n\n .. note::\n If the name provided already exists, then an error will be returned.\n Secondary method names must be unique.\n\n :Example:\n\n::\n # Show available texttable object\n vcs.show('texttable')\n # Show available textorientation object\n vcs.show('textorientation')\n # Show font options\n vcs.show('font')\n tc=vcs.createtextcombined('example1','std','example1','7left')\n vcs.show('texttable')\n vcs.show('textorientation')\n\n:param Tt_name: Name of created object\n:type Tt_name: str\n\n:param Tt_source: Texttable object to inherit from. 
Can be a texttable, or a string name of a texttable.\n:type Tt_source: str or vcs.texttable.Tt\n\n:param To_name: Name of the textcombined's text orientation (to be created)\n:type To_name: str\n\n:param To_source: Name of the textorientation to inherit.\n Can be a textorientation, or a string name of a textorientation.\n:type To_source: str or vcs.textorientation.To\n\n:param font: Which font to use (index or name).\n:type font: int or str\n\n:param spacing: DEPRECATED\n:type spacing: DEPRECATED\n\n:param expansion: DEPRECATED\n:type expansion: DEPRECATED\n\n:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\n or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n:type color: str or int\n\n:param priority: The layer on which the object will be drawn.\n:type priority: int\n\n:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas\n:type viewport: list of floats\n\n:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n:type worldcoordinate: list of floats\n\n:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].\n:type x: list of floats\n\n:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n:type y: list of floats\n\n:param height: Size of the font\n:type height: int\n\n:param angle: Angle of the text, in degrees\n:type angle: int\n\n:param path: DEPRECATED\n:type path: DEPRECATED\n\n:param halign: Horizontal alignment of the text. One of [\"left\", \"center\", \"right\"].\n:type halign: str\n\n:param valign: Vertical alignment of the text. One of [\"top\", \"center\", \"botom\"].\n:type valign: str\n\n:param projection: Specify a geographic projection used to convert x/y from spherical coordinates into 2D coordinates.\n:type projection: str or projection object\n\n:returns: A VCS text object\n:rtype: vcs.textcombined.Tc\n\n\"\"\"\n # Check if to is defined\n if To_name is None:\n To_name = Tt_name\n Tt_name, Tt_source = check_name_source(Tt_name, Tt_source, 'texttable')\n To_name, To_source = check_name_source(\n To_name, To_source, 'textorientation')\n\n tc = textcombined.Tc(Tt_name, Tt_source, To_name, To_source)\n if (font is not None):\n tc.font = font\n if (spacing is not None):\n tc.spacing = spacing\n if (expansion is not None):\n tc.expansion = expansion\n if (color is not None):\n tc.color = color\n if (priority is not None):\n tc.priority = priority\n if (viewport is not None):\n tc.viewport = viewport\n if (worldcoordinate is not None):\n tc.worldcoordinate = worldcoordinate\n if (x is not None):\n tc.x = x\n if (y is not None):\n tc.y = y\n if (height is not None):\n tc.height = height\n if (angle is not None):\n tc.angle = angle\n if (path is not None):\n tc.path = path\n if (halign is not None):\n tc.halign = halign\n if (valign is not None):\n tc.valign = valign\n if (projection is not None):\n tc.projection = projection\n return tc\n#\n# Set alias for the secondary createtextcombined.\ncreatetext = createtextcombined\n\n\ndef gettextcombined(Tt_name_src='default', To_name_src=None, string=None, font=None, spacing=None,\n expansion=None, color=None,\n priority=None, viewport=None, worldcoordinate=None, x=None, y=None,\n height=None, angle=None, path=None, halign=None, valign=None):\n \"\"\"\n VCS contains a list of secondary methods. 
This function will create\n a textcombined class object from an existing VCS texttable secondary\n method and an existing VCS textorientation secondary method. If no\n texttable or textorientation names are given, then the 'default' names\n will be used in both cases.\n\n .. note::\n VCS does not allow the modification of `default' attribute sets.\n However, a `default' attribute set that has been copied under a\n different name can be modified. (See the createtextcombined function.)\n\n .. deprecated:: 1.0\n path, spacing, and expansion parameters are no longer used.\n\n :Example:\n\n::\n\n # Show all the existing texttable secondary methods\n vcs.show('texttable')\n # Show all the existing textorientation secondary methods\n vcs.show('textorientation')\n # Show all font names\n vcs.show('font')\n # Use 'default' for texttable and textorientation\n tc=vcs.gettextcombined()\n # Use 'std' texttable and '7left' textorientation\n tc2=vcs.gettextcombined('std','7left')\n # Check to see if tc is a textcombined\n if istextcombined(tc):\n # Print out all its attriubtes\n tc.list()\n\n\n:param Tt_name_src: Name of created object\n:type Tt_name_src: str\n\n:param To_name_src: Name of parent textorientation object\n:type To_name_src: str\n\n:param string: Text to render\n:param string: list of str\n\n:param font: Which font to use (index or name)\n:type font: int or str\n\n:param spacing: DEPRECATED\n:type spacing: DEPRECATED\n\n:param expansion: DEPRECATED\n:type expansion: DEPRECATED\n\n:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,\n or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))\n:type color: str or int\n\n:param priority: The layer on which the object will be drawn.\n:type priority: int\n\n:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas\n:type viewport: list of floats\n\n:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)\n:type worldcoordinate: list of floats\n\n:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].\n:type x: list of floats\n\n:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].\n:type y: list of floats\n\n:param height: Size of the font\n:type height: int\n\n:param angle: Angle of the rendered text, in degrees\n:type angle: list of int\n\n:param path: DEPRECATED\n:type path: DEPRECATED\n\n:param halign: Horizontal alignment of the text. One of [\"left\", \"center\", \"right\"]\n:type halign: str\n\n:param valign: Vertical alignment of the text. 
One of [\"top\", \"center\", \"bottom\"]\n:type valign: str\n\n:returns: A textcombined object\n:rtype: vcs.textcombined.Tc\n \"\"\"\n\n # Check to make sure the arguments passed in are a STRINGS\n if not isinstance(Tt_name_src, str):\n raise vcsError('The first argument must be a string.')\n if To_name_src is None:\n sp = Tt_name_src.split(\":::\")\n if len(sp) == 2:\n Tt_name_src = sp[0]\n To_name_src = sp[1]\n if not isinstance(To_name_src, str):\n raise vcsError('The second argument must be a string.')\n\n tc = vcs.elements[\"textcombined\"].get(\n \"%s:::%s\" %\n (Tt_name_src, To_name_src), None)\n if tc is None:\n raise Exception(\n \"No such text combined: %s:::%s\" %\n (Tt_name_src, To_name_src))\n\n if (string is not None) and (tc.Tt_name != \"default\"):\n tc.string = string\n if (font is not None) and (tc.Tt_name != \"default\"):\n tc.font = font\n if (spacing is not None) and (tc.Tt_name != \"default\"):\n tc.spacing = spacing\n if (expansion is not None) and (tc.Tt_name != \"default\"):\n tc.expansion = expansion\n if (color is not None) and (tc.Tt_name != \"default\"):\n tc.color = color\n if (priority is not None) and (tc.Tt_name != \"default\"):\n tc.priority = priority\n if (viewport is not None) and (tc.Tt_name != \"default\"):\n tc.viewport = viewport\n if (worldcoordinate is not None) and (tc.Tt_name != \"default\"):\n tc.worldcoordinate = worldcoordinate\n if (x is not None) and (tc.To_name != \"default\"):\n tc.x = x\n if (y is not None) and (tc.To_name != \"default\"):\n tc.y = y\n if (height is not None) and (tc.To_name != \"default\"):\n tc.height = height\n if (angle is not None) and (tc.To_name != \"default\"):\n tc.angle = angle\n if (path is not None) and (tc.To_name != \"default\"):\n tc.path = path\n if (halign is not None) and (tc.To_name != \"default\"):\n tc.halign = halign\n if (valign is not None) and (tc.To_name != \"default\"):\n tc.valign = valign\n return tc\n#\n# Set alias for the secondary gettextcombined.\ngettext = gettextcombined\n\n\ndef get3d_scalar(Gfdv3d_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n dv3d class object from an existing VCS dv3d graphics method. If\n no dv3d name is given, then dv3d 'default' will be used.\n\n ..note::\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the create3Dscalar function.)\n\n :Example:\n\n::\n\n # Show all the existing 3Dscalar graphics methods\n a.show('3d_scalar')\n # plot instance of 'default' dv3d graphics method\n plot=vcs.get3d_scalar()\n\n:param Gfdv3d_name_src: String name of an existing 3d_scalar VCS object.\n:type Gfdv3d_name_src: str\n\n:returns: A pre-existing 3d_scalar VCS object\n:rtype: vcs.dv3d.Gf3Dscalar\n\"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Gfdv3d_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if Gfdv3d_name_src not in vcs.elements[\"3d_scalar\"]:\n raise ValueError(\"dv3d '%s' does not exists\" % Gfdv3d_name_src)\n\n return vcs.elements[\"3d_scalar\"][Gfdv3d_name_src]\n\n\ndef create3d_scalar(name=None, source='default'):\n \"\"\"\n Create a new dv3d graphics method given the the name and the existing\n dv3d graphics method to copy the attributes from. If no existing\n dv3d graphics method name is given, then the default dv3d graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. 
note::\n If the name provided already exists, then an error will be returned. Graphics method names must be unique.\n\n :Example:\n\n::\n\n a=vcs.init()\n a.show('3d_scalar')\n plot=a.create3d_scalar()\n\n:param name: The name of the created object\n:type name: str\n\n:param source: The object to inherit from\n:type source: a 3d_scalar or a string name of a 3d_scalar\n\n:returns: A 3d_scalar graphics method object\n:rtype: vcs.dv3d.Gf3Dscalar\n\"\"\"\n name, source = check_name_source(name, source, '3d_scalar')\n return dv3d.Gf3Dscalar(name, source)\n\n\ndef get3d_dual_scalar(Gfdv3d_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n dv3d class object from an existing VCS dv3d graphics method. If\n no dv3d name is given, then dv3d 'default' will be used.\n\n .. note::\n VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. (See the create3Dscalar function.)\n\n :Example:\n\n::\n\n # Show all the existing 3Dscalar graphics methods\n a.show('3d_dual_scalar')\n # plot instance of 'default' dv3d graphics method\n plot=vcs.get3d_dual_scalar()\n\n:param Gfdv3d_name_src: String name of an existing 3d_dual_scalar VCS object\n:type Gfdv3d_name_src: str\n\n:returns: A pre-existing 3d_dual_scalar VCS object\n:rtype: vcs.dv3d.Gf3DDualScalar\n\"\"\"\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Gfdv3d_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if Gfdv3d_name_src not in vcs.elements[\"3d_dual_scalar\"]:\n raise ValueError(\"dv3d '%s' does not exists\" % Gfdv3d_name_src)\n\n return vcs.elements[\"3d_dual_scalar\"][Gfdv3d_name_src]\n\n\ndef create3d_dual_scalar(name=None, source='default'):\n \"\"\"\n Create a new dv3d graphics method given the the name and the existing\n dv3d graphics method to copy the attributes from. If no existing\n dv3d graphics method name is given, then the default dv3d graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n ::\n\n a=vcs.init()\n a.show('3d_dual_scalar')\n plot=a.create3d_dual_scalar()\n\n :param name: The name of the created object\n :type name: str\n\n :param source: The object to inherit from\n :type source: a 3d_dual_scalar or a string name of a 3d_dual_scalar\n\n :returns: A 3d_dual_scalar graphics method object\n :rtype: vcs.dv3d.Gf3DDualScalar\n \"\"\"\n\n name, source = check_name_source(name, source, '3d_dual_scalar')\n return dv3d.Gf3DDualScalar(name, source)\n\n\ndef get3d_vector(Gfdv3d_name_src='default'):\n \"\"\"\n VCS contains a list of graphics methods. This function will create a\n dv3d class object from an existing VCS dv3d graphics method. If\n no dv3d name is given, then dv3d 'default' will be used.\n\n Note, VCS does not allow the modification of `default' attribute\n sets. However, a `default' attribute set that has been copied under a\n different name can be modified. 
(See the create3Dvector function.)\n\n :Example:\n\n::\n\n # Show all the existing 3Dvector graphics methods\n a.show('3d_vector')\n # plot instance of 'default' dv3d graphics method\n plot=vcs.get3d_vector()\n\n:param Gfdv3d_name_src: String name of an existing 3d_vector VCS object\n:type Gfdv3d_name_src: str\n\n:returns: A pre-existing 3d_vector VCS object\n:rtype: vcs.dv3d.Gf3Dvector\n\n\"\"\"\n\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Gfdv3d_name_src, str):\n raise vcsError('The argument must be a string.')\n\n if Gfdv3d_name_src not in vcs.elements[\"3d_vector\"]:\n raise ValueError(\"dv3d '%s' does not exists\" % Gfdv3d_name_src)\n\n return vcs.elements[\"3d_vector\"][Gfdv3d_name_src]\n\n\ndef create3d_vector(name=None, source='default'):\n \"\"\"\n Create a new dv3d graphics method given the the name and the existing\n dv3d graphics method to copy the attributes from. If no existing\n dv3d graphics method name is given, then the default dv3d graphics\n method will be used as the graphics method to which the attributes will\n be copied from.\n\n .. note::\n If the name provided already exists, then an error will be returned. Graphics\n method names must be unique.\n\n :Example:\n\n::\n\n a=vcs.init()\n a.show('3d_vector')\n plot=a.create3d_vector()\n\n:param name: The name of the created object\n:type name: str\n\n:param source: The object to inherit from\n:type source: a 3d_vector or a string name of a 3d_vector\n\n:returns: A 3d_vector graphics method object\n:rtype: vcs.dv3d.Gf3Dvector\n\n\"\"\"\n\n name, source = check_name_source(name, source, '3d_vector')\n return dv3d.Gf3Dvector(name, source)\n\n#############################################################################\n# #\n# Colormap functions for VCS. #\n# #\n#############################################################################\n\n\ndef createcolormap(Cp_name=None, Cp_name_src='default'):\n \"\"\"\n Create a new colormap secondary method given the the name and the existing\n colormap secondary method to copy the attributes from. If no existing colormap\n secondary method name is given, then the default colormap secondary method\n will be used as the secondary method to which the attributes will be\n copied from.\n\n .. note::\n If the name provided already exists, then an error will be returned.\n Secondary method names must be unique.\n\n :Example:\n\n::\n\n a=vcs.init()\n cp=a.createcolormap('example1',)\n a.show('colormap')\n cp=a.createcolormap('example2','AMIP')\n a.show('colormap')\n\n:param Cp_name: The name of the created object\n:type Cp_name: str\n\n:param Cp_name_src: The object to inherit\n:type Cp_name_src: a colormap or a string name of a colormap\n\n:returns: A VCS colormap object\n:rtype: vcs.colormap.Cp\n\"\"\"\n\n Cp_name, Cp_name_src = check_name_source(Cp_name, Cp_name_src, 'colormap')\n return colormap.Cp(Cp_name, Cp_name_src)\n\n\ndef getcolormap(Cp_name_src='default'):\n \"\"\"\n VCS contains a list of secondary methods. This function will create a\n colormap class object from an existing VCS colormap secondary method. If\n no colormap name is given, then colormap 'default' will be used.\n\n .. note::\n VCS does not allow the modification of `default' attribute sets.\n However, a `default' attribute set that has been copied under a\n different name can be modified. 
(See the createcolormap function.)\n\n :Example:\n\n ::\n\n a=vcs.init()\n # Show all the existing colormap secondary methods\n a.show('colormap')\n # cp instance of 'default' colormap secondary method\n cp=a.getcolormap()\n # cp2 instance of existing 'quick' colormap secondary method\n cp2=a.getcolormap('quick')\n\n\n :param Cp_name_src: String name of an existing colormap VCS object\n :type Cp_name_src: str\n\n :returns: A pre-existing VCS colormap object\n :rtype: vcs.colormap.Cp\n\n\"\"\"\n # Check to make sure the argument passed in is a STRING\n if not isinstance(Cp_name_src, str):\n raise ValueError('Error - The argument must be a string.')\n\n return vcs.elements[\"colormap\"][Cp_name_src]\n\n# Function that deal with removing existing vcs elements\n\n\ndef removeG(obj, gtype=\"boxfill\"):\n exec(\"res = vcs.is%s(obj)\" % gtype)\n if isinstance(obj, str):\n name = obj\n if obj not in vcs.elements[gtype].keys():\n raise RuntimeError(\"Cannot remove inexisting %s %s\" % (gtype, obj))\n else:\n name = obj.name\n if not res: # noqa\n raise RuntimeError(\"You are trying to remove a VCS %s but %s is not one\" % (gtype, repr(obj)))\n msg = \"Removed %s object %s\" % (gtype, name)\n del(vcs.elements[gtype][name])\n return msg\n\n\ndef removeGfb(obj):\n return removeG(obj, \"boxfill\")\n\n\ndef removeGfi(obj):\n return removeG(obj, \"isofill\")\n\n\ndef removeGi(obj):\n return removeG(obj, \"isoline\")\n\n\ndef removeGXy(obj):\n return removeG(obj, \"xyvsx\")\n\n\ndef removeGYx(obj):\n return removeG(obj, \"yxvsx\")\n\n\ndef removeGXY(obj):\n return removeG(obj, \"xvsy\")\n\n\ndef removeG1d(obj):\n return removeG(obj, \"1d\")\n\n\ndef removeGv(obj):\n return removeG(obj, \"vector\")\n\n\ndef removeGSp(obj):\n return removeG(obj, \"scatter\")\n\n\ndef removeGfm(obj):\n return removeG(obj, \"meshfill\")\n\n\ndef removeGtd(obj):\n return removeG(obj, \"taylordiagram\")\n\n\ndef removeTl(obj):\n return removeG(obj, \"line\")\n\n\ndef removeTm(obj):\n return removeG(obj, \"marker\")\n\n\ndef removeTf(obj):\n return removeG(obj, \"fillarea\")\n\n\ndef removeTt(obj):\n return removeG(obj, \"texttable\")\n\n\ndef removeTo(obj):\n return removeG(obj, \"textorientation\")\n\n\ndef removeTc(obj):\n if isinstance(obj, str):\n Tt, To = obj.split(\":::\")\n else:\n To = obj.To_name\n Tt = obj.Tt_name\n msg = removeTt(Tt)\n msg += removeTo(To)\n removeG(obj, \"textcombined\")\n return msg\n\n\ndef removeProj(obj):\n return removeG(obj, \"projection\")\n\n\ndef removeCp(obj):\n return removeG(obj, \"colormap\")\n\n\ndef removeP(obj):\n return removeG(obj, \"template\")\n\n\ndef removeobject(obj):\n \"\"\"\n The user has the ability to create primary and secondary class\n objects. 
The function allows the user to remove these objects\n from the appropriate class list.\n\n Note, To remove the object completely from Python, remember to\n use the \"del\" function.\n\n Also note, The user is not allowed to remove a \"default\" class\n object.\n\n :Example:\n\n::\n\n a=vcs.init()\n # To Modify an existing line object\n line=a.getline('red')\n # Create an instance of an isoline object\n iso=a.createisoline('dean')\n #...\n # Removes line object from VCS list\n a.remove(line)\n # Remove isoline object from VCS list\n a.remove(iso)\n\n:param obj: Any VCS primary or secondary object\n:type obj: VCS object\n\n:returns: String indicating the specified object was removed\n:rtype: str\n \"\"\"\n\n if vcs.istemplate(obj):\n msg = vcs.removeP(obj.name)\n elif vcs.isgraphicsmethod(obj):\n if (obj.g_name == 'Gfb'):\n msg = vcs.removeGfb(obj.name)\n elif (obj.g_name == 'Gfi'):\n msg = vcs.removeGfi(obj.name)\n elif (obj.g_name == 'Gi'):\n msg = vcs.removeGi(obj.name)\n elif (obj.g_name == 'GXy'):\n msg = vcs.removeGXy(obj.name)\n elif (obj.g_name == 'GYx'):\n msg = vcs.removeGYx(obj.name)\n elif (obj.g_name == 'GXY'):\n msg = vcs.removeGXY(obj.name)\n elif (obj.g_name == 'Gv'):\n msg = vcs.removeGv(obj.name)\n elif (obj.g_name == 'GSp'):\n msg = vcs.removeGSp(obj.name)\n elif (obj.g_name == 'Gfm'):\n msg = vcs.removeGfm(obj.name)\n elif (obj.g_name == 'G1d'):\n msg = vcs.removeG1d(obj.name)\n elif (obj.g_name == 'Gtd'):\n msg = vcs.removeGtd(obj.name)\n else:\n msg = 'Could not find the correct graphics class object.'\n elif vcs.issecondaryobject(obj):\n if (obj.s_name == 'Tl'):\n msg = vcs.removeTl(obj.name)\n elif (obj.s_name == 'Tm'):\n msg = vcs.removeTm(obj.name)\n elif (obj.s_name == 'Tf'):\n msg = vcs.removeTf(obj.name)\n elif (obj.s_name == 'Tt'):\n msg = vcs.removeTt(obj.name)\n elif (obj.s_name == 'To'):\n msg = vcs.removeTo(obj.name)\n elif (obj.s_name == 'Tc'):\n msg = vcs.removeTc(obj.name)\n elif (obj.s_name == 'Proj'):\n msg = vcs.removeProj(obj.name)\n elif (obj.s_name == 'Cp'):\n msg = vcs.removeCp(obj.name)\n else:\n msg = 'Could not find the correct secondary class object.'\n else:\n msg = 'This is not a template, graphics method, or secondary method object.'\n return msg\n" }, { "alpha_fraction": 0.6107341647148132, "alphanum_fraction": 0.6204766631126404, "avg_line_length": 32.37010192871094, "blob_id": "b2691af10591da36d104fa5e16a94651366e67ae", "content_id": "2613395cd833a34b39cef38e2de74ee7bc64df0b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22992, "license_type": "no_license", "max_line_length": 203, "num_lines": 689, "path": "/vcs/xmldocs.py", "repo_name": "embrown/vcs", "src_encoding": "UTF-8", "text": "plot_keywords_doc = \"\"\"\n:param xaxis: Axis object to replace the slab -1 dim axis\n:param yaxis: Axis object to replace the slab -2 dim axis, only if slab has more than 1D\n:param zaxis: Axis object to replace the slab -3 dim axis, only if slab has more than 2D\n:param taxis: Axis object to replace the slab -4 dim axis, only if slab has more than 3D\n:param waxis: Axis object to replace the slab -5 dim axis, only if slab has more than 4D\n:param xrev: reverse x axis\n:param yrev: reverse y axis, only if slab has more than 1D\n:param xarray: Values to use instead of x axis\n:param yarray: Values to use instead of y axis, only if var has more than 1D\n:param zarray: Values to use instead of z axis, only if var has more than 2D\n:param tarray: Values to use instead of t axis, only if var has 
more than 3D\n:param warray: Values to use instead of w axis, only if var has more than 4D\n:param continents: continents type number\n:param name: replaces variable name on plot\n:param time: replaces time name on plot\n:param units: replaces units value on plot\n:param ymd: replaces year/month/day on plot\n:param hms: replaces hh/mm/ss on plot\n:param file_comment: replaces file_comment on plot\n:param xbounds: Values to use instead of x axis bounds values\n:param ybounds: Values to use instead of y axis bounds values (if exist)\n:param xname: replace xaxis name on plot\n:param yname: replace yaxis name on plot (if exists)\n:param zname: replace zaxis name on plot (if exists)\n:param tname: replace taxis name on plot (if exists)\n:param wname: replace waxis name on plot (if exists)\n:param xunits: replace xaxis units on plot\n:param yunits: replace yaxis units on plot (if exists)\n:param zunits: replace zaxis units on plot (if exists)\n:param tunits: replace taxis units on plot (if exists)\n:param wunits: replace waxis units on plot (if exists)\n:param xweights: replace xaxis weights used for computing mean\n:param yweights: replace xaxis weights used for computing mean\n:param comment1: replaces comment1 on plot\n:param comment2: replaces comment2 on plot\n:param comment3: replaces comment3 on plot\n:param comment4: replaces comment4 on plot\n:param long_name: replaces long_name on plot\n:param grid: replaces array grid (if exists)\n:param bg: plots in background mode\n:param ratio: sets the y/x ratio ,if passed as a string with 't' at the end, will aslo moves the ticks\n:type xaxis: cdms2.axis.TransientAxis\n:type yaxis: cdms2.axis.TransientAxis\n:type zaxis: cdms2.axis.TransientAxis\n:type taxis: cdms2.axis.TransientAxis\n:type waxis: cdms2.axis.TransientAxis\n:type xrev: bool\n:type yrev: bool\n:type xarray: array\n:type yarray: array\n:type zarray: array\n:type tarray: array\n:type warray: array\n:type continents: int\n:type name: str\n:type time: A cdtime object\n:type units: str\n:type ymd: str\n:type hms: str\n:type file_comment: str\n:type xbounds: array\n:type ybounds: array\n:type xname: str\n:type yname: str\n:type zname: str\n:type tname: str\n:type wname: str\n:type xunits: str\n:type yunits: str\n:type zunits: str\n:type tunits: str\n:type wunits: str\n:type xweights: array\n:type yweights: array\n:type comment1: str\n:type comment2: str\n:type comment3: str\n:type comment4: str\n:type long_name: str\n:type grid: cdms2.grid.TransientRectGrid\n:type bg: bool/int\n:type ratio: int/str\n\"\"\" # noqa\n\ndata_time = \"\"\"\n .. py:attribute:: datawc_timeunits (str)\n\n (Ex: 'days since 2000') units to use when displaying time dimension auto tick\n\n .. py:attribute:: datawc_calendar (int)\n\n (Ex: 135441) calendar to use when displaying time dimension auto tick, default is proleptic gregorian calendar\n\n\"\"\" # noqa\ngraphics_method_core_notime = \"\"\"\n .. py:attribute:: xmtics1 (str/{float:str})\n\n (Ex: '') dictionary with location of intermediate tics as keys for 1st side of y axis\n\n .. py:attribute:: xmtics2 (str/{float:str})\n\n (Ex: '') dictionary with location of intermediate tics as keys for 2nd side of y axis\n\n .. py:attribute:: ymtics1 (str/{float:str})\n\n (Ex: '') dictionary with location of intermediate tics as keys for 1st side of y axis\n\n .. py:attribute:: ymtics2 (str/{float:str})\n\n (Ex: '') dictionary with location of intermediate tics as keys for 2nd side of y axis\n\n .. 
py:attribute:: xticlabels1 (str/{float:str})\n\n (Ex: '*') values for labels on 1st side of x axis\n\n .. py:attribute:: xticlabels2 (str/{float:str})\n\n (Ex: '*') values for labels on 2nd side of x axis\n\n .. py:attribute:: yticlabels1 (str/{float:str})\n\n (Ex: '*') values for labels on 1st side of y axis\n\n .. py:attribute:: yticlabels2 (str/{float:str})\n\n (Ex: '*') values for labels on 2nd side of y axis\n\n .. py:attribute:: projection (str/vcs.projection.Proj)\n\n (Ex: 'default') projection to use, name or object\n\n .. py:attribute:: datawc_x1 (float)\n\n (Ex: 1.E20) first value of xaxis on plot\n\n .. py:attribute:: datawc_x2 (float)\n\n (Ex: 1.E20) second value of xaxis on plot\n\n .. py:attribute:: datawc_y1 (float)\n\n (Ex: 1.E20) first value of yaxis on plot\n\n .. py:attribute:: datawc_y2 (float)\n\n (Ex: 1.E20) second value of yaxis on plot\n \"\"\" # noqa\ngraphics_method_core = \"\"\"%s\n%s\"\"\" % (graphics_method_core_notime, data_time)\naxisconvert = \"\"\"\n :param {axis}axisconvert: (Ex: 'linear') converting {axis}axis linear/log/log10/ln/exp/area_wt\n :type {axis}axisconvert: str\\n\"\"\"\nxaxisconvert = axisconvert.format(axis=\"x\")\nyaxisconvert = axisconvert.format(axis=\"y\")\naxesconvert = xaxisconvert + yaxisconvert\ncolorsdoc = \"\"\"\n Sets the color_1 and color_2 properties of the object.\n\n :param color1: Sets the :py:attr:`color_1` value on the object\n :type color1: int\n\n :param color2: Sets the :py:attr:`color_2` value on the object\n :type color2: int\n \"\"\"\n\nextsdoc = \"\"\"\n Sets the ext_1 and ext_2 values on the object.\n\n:param ext1: Sets the :py:attr:`ext_1` value on the object. 'y' sets it to True, 'n' sets it to False.\n:type ext1: str\n\n:param ext2: Sets the :py:attr:`ext_2` value on the object. 'y' sets it to True, 'n' sets it to False.\n:type ext2: str\n \"\"\"\nticlabeldoc = \"\"\"\n Sets the %sticlabels1 and %sticlabels2 values on the object\n\n:param %stl1: Sets the object's value for :py:attr:`%sticlabels1`.\n Must be a str, or a dictionary object with float:str mappings.\n:type %stl1: {float:str} or str\n\n:param %stl2: Sets the object's value for :py:attr:`%sticlabels2`.\n Must be a str, or a dictionary object with float:str mappings.\n:type %stl2: {float:str} or str\n \"\"\"\nxticlabelsdoc = ticlabeldoc % (('x',) * 8)\nyticlabelsdoc = ticlabeldoc % (('y',) * 8)\n\nmticsdoc = \"\"\"\n Sets the %smtics1 and %smtics2 values on the object\n\n:param %smt1: Value for :py:attr:`%smtics1`. Must be a str, or a dictionary object with float:str mappings.\n:type %smt1: {float:str} or str\n\n:param %smt2: Value for :py:attr:`%smtics2`. 
Must be a str, or a dictionary object with float:str mappings.\n:type %smt2: {float:str} or str\n\"\"\"\nxmticsdoc = mticsdoc % (('x',) * 8)\nymticsdoc = mticsdoc % (('y',) * 8)\n\ndatawcdoc = \"\"\"\n Sets the data world coordinates for object\n\n :param dsp1: Sets the :py:attr:`datawc_y1` property of the object.\n :type dsp1: float\n\n :param dsp2: Sets the :py:attr:`datawc_y2` property of the object.\n :type dsp2: float\n\n :param dsp3: Sets the :py:attr:`datawc_x1` property of the object.\n :type dsp3: float\n\n :param dsp4: Sets the :py:attr:`datawc_x2` property of the object.\n :type dsp4: float\n\n \"\"\"\nxyscaledoc = \"\"\"\n Sets xaxisconvert and yaxisconvert values for the object.\n\n :Example:\n\n::\n\n a=vcs.init()\n #create a boxfill to work with\n box=a.createboxfill('temp')\n # set xaxisconvert and yaxisconvert to 'linear'\n box.xyscale(xat='linear', yat='linear')\n\n :param xat: Set value for x axis conversion.\n :type xat: str\n\n :param yat: Set value for y axis conversion.\n :type yat: str\n \"\"\"\nlistdoc = \"\"\" Lists the current values of object attributes\"\"\"\n\n# Scriptdocs section\n\n# Use this dictionary for string replacements\n# dict keys are 'type', 'name', and 'call'\n# 'type' : The type of VCS object it is (i.e. Graphics method, secondary method, etc.)\n# 'name' : The name of the VCS object (i.e. boxfill, isofill, etc.)\n# 'call' : The function call for the object. Mostly, this is == name.\n# Some rare cases, like textcombined, require adjustment of this value.\ndict = {}\ndict['name'] = dict['type'] = dict['call'] = 'REPLACE_ME'\n\n\nscriptdoc = \"\"\"\n Saves out a copy of the %(name)s %(type)s in JSON, or Python format to a designated file.\n\n .. note::\n If the the filename has a '.py' at the end, it will produce a\n Python script. If no extension is given, then by default a\n .json file containing a JSON serialization of the object's\n data will be produced.\n\n .. warning::\n VCS Scripts Deprecated.\n SCR scripts are no longer generated by this function.\n\n :Example:\n\n .. doctest:: script_examples\n\n # Make a Canvas object to work with:\n >>> a=vcs.init()\n ...\n\n # Create %(call)s 'temp' that inherits from 'default'\n >>> ex=a.create%(call)s('temp')\n ...\n\n # Append to a Python script named 'filename.py'\n >>> ex.script('filename.py')\n ...\n\n # Create or overwrite a JSON file 'filename.json'.\n >>> ex.script('filename','w')\n ...\n\n :param script_filename: Output name of the script file. If no extension is specified, a .json object is created.\n :type script_filename: str\n\n :param mode: Either 'w' for replace, or 'a' for append. 
Defaults to 'a', if not specified.\n :type mode: str\n\"\"\"\n\n\n# Graphics Method scriptdocs\ndict['type'] = 'graphics method'\ndict['name'] = dict['call'] = 'colormap'\ncolormap_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'boxfill'\nboxfill_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'isoline'\nisoline_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'isofill'\nisofill_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'yxvsx'\nyxvsx_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'meshfill'\nmeshfill_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'fillarea'\nfillarea_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'marker'\nmarker_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'line'\nline_script = scriptdoc % dict\n\ndict['name'] = 'text table and text orientation'\ndict['call'] = 'textcombined'\ntextcombined_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'textorientation'\ntextorientation_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'texttable'\ntexttable_script = scriptdoc % dict\n\ndict['name'] = dict['call'] = 'vector'\nvector_script = scriptdoc % dict\n\n# Object scriptdocs\ndict['type'] = 'object'\ndict['name'] = dict['call'] = 'template'\ntemplate_script = scriptdoc % dict\n\n# Secondary Method scriptdocs\ndict['type'] = 'secondary method'\ndict['name'] = dict['call'] = 'projection'\nprojection_script = scriptdoc % dict\n\n# dict['parent'] is for rare cases where there is no 'default' object to inherit from.\ndict['parent'] = 'REPLACE_ME'\nqueries_is_doc = \"\"\"\n Check to see if this object is a VCS %(type)s %(name)s graphics method.\n\n :Example:\n\n .. doctest:: queries_is\n\n # Make a VCS Canvas object to work with:\n >>> a=vcs.init()\n ...\n\n # Show all available %(name)s\n >>> a.show('%(name)s')\n *...\n\n # To test an existing %(name)s object\n >>> ex = a.get%(name)s(%(parent)s)\n ...\n\n >>> print(vcs.queries.is%(name)s(ex))\n 1\n\n:param obj: A VCS object\n:type obj: VCS Object\n\n:returns: An integer indicating whether the object is a %(name)s graphics method (1), or not (0).\n:rtype: int\n\"\"\"\n# queries.is[PRIMARY_OBJECT]\ndict['type'] = 'primary'\ndict['parent'] = \"'default'\"\n\ndict['name'] = 'vector'\nisvector_doc = queries_is_doc % dict\ndict['name'] = 'taylordiagram'\nistaylordiagram_doc = queries_is_doc % dict\ndict['name'] = 'meshfill'\nismeshfill_doc = queries_is_doc % dict\ndict['name'] = 'boxfill'\nisboxfill_doc = queries_is_doc % dict\ndict['name'] = 'isofill'\nisisofill_doc = queries_is_doc % dict\ndict['name'] = 'isoline'\nisisoline_doc = queries_is_doc % dict\ndict['name'] = '3d_scalar'\nis3d_scalar_doc = queries_is_doc % dict\ndict['name'] = '3d_dual_scalar'\nis3d_dual_scalar_doc = queries_is_doc % dict\ndict['name'] = '3d_vector'\nis3d_vector_doc = queries_is_doc % dict\ndict['name'] = 'xvsy'\nisxvsy_doc = queries_is_doc % dict\ndict['name'] = 'yxvsx'\nisyxvsx_doc = queries_is_doc % dict\ndict['name'] = '1d'\nis1d_doc = queries_is_doc % dict\n\n# special inheritance cases\ndict['name'] = 'scatter'\ndict['parent'] = \"'default_scatter_'\"\nisscatter_doc = queries_is_doc % dict\ndict['name'] = 'xyvsy'\ndict['parent'] = \"'default_xyvsy_'\"\nisxyvsy_doc = queries_is_doc % dict\n\n# queries.is[SECONDARY_OBJECT]\ndict['type'] = 'secondary'\ndict['parent'] = \"'default'\"\n\ndict['name'] = 'line'\nisline_doc = queries_is_doc % dict\ndict['name'] = 'marker'\nismarker_doc = queries_is_doc % dict\ndict['name'] = 
'fillarea'\nisfillarea_doc = queries_is_doc % dict\ndict['name'] = 'texttable'\nistexttable_doc = queries_is_doc % dict\ndict['name'] = 'textorientation'\nistextorientation_doc = queries_is_doc % dict\ndict['name'] = 'textcombined'\nistextcombined_doc = queries_is_doc % dict\ndict['name'] = 'marker'\nismarker_doc = queries_is_doc % dict\n\n# TODO: revise this to use dict key references\nget_methods_doc = \"\"\"\n VCS contains a list of secondary methods. This function will create a\n %(name)s class object from an existing VCS %(name)s %(type)s. If\n no %(name)s name is given, then %(name)s '%(parent)s' will be used.\n\n .. note::\n\n VCS does not allow the modification of 'default' attribute sets.\n However, a 'default' attribute set that has been copied under a\n different name can be modified. (See the :ref:`vcs.manageElements.createfillarea` function.)\n\n :Example:\n\n ::\n\n # Show all the existing %(name)s %(type)s\n vcs.show('%(name)s')\n # instance of '%(parent)s' %(name)s %(type)s\n ex=vcs.get%(call)s()\n # instance of existing 'quick' %(name)s %(type)s\n ex2=vcs.get%(call)s('quick')\n # Create instance of %(name)s object 'red'\n ex3=vcs.create%(call)s(name='new', name='red',style=1, index=1,\n color=242, priority=1, viewport=[0, 2.0, 0, 2.0],\n worldcoordinate=[0,100, 0,50]\n x=[0,20,40,60,80,100],\n y=[0,10,20,30,40,50] )\n # Plot using specified %(call)s object\n vcs.%(name)s(ex3)\n \"\"\"\n# Get for secondary methods with a 'default' available\ndict['parent'] = 'default'\ndict['type'] = 'secondary method'\n\ndict['name'] = dict['call'] = 'fillarea'\nget_fillarea_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_texttable_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_template_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_projection_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_boxfill_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_taylor_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_meshfill_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_isofill_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_isoline_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_1d_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_xyvsy_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_yxvsx_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_xvsy_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_vector_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_scatter_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_line_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_marker_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_textorientation_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_textcombined_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_3d_scalar_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_3d_dual_scalar_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_3d_vector_doc = get_methods_doc % dict\ndict['name'] = dict['call'] = 'texttable'\nget_colormap_doc = get_methods_doc % dict\n\nexts_attrs = \"\"\"\n .. 
py:attribute:: ext_1 (str)\n\n Draws an extension arrow on right side (values less than first range value)\n\n .. py:attribute:: ext_2 (str)\n\n Draws an extension arrow on left side (values greater than last range value)\n \"\"\"\n\nfillarea_colors_attr = \"\"\"\n .. py:attribute:: fillareacolors ([int,...])\n\n Colors to use for each level\n\"\"\"\n\nfillarea_attrs = \"\"\"\n .. py:attribute:: fillareastyle (str)\n\n Style to use for levels filling: solid/pattern/hatch\n\n .. py:attribute:: fillareaindices ([int,...])\n\n List of patterns to use when filling a level and using pattern/hatch\n\"\"\"\n\nlegend_attr = \"\"\"\n .. py:attribute:: legend (None/{float:str})\n\n Replaces the legend values in the dictionary keys with their associated string\n\"\"\"\n\nlevel_attrs = \"\"\"\n .. py:attribute:: level_1 (float)\n\n Sets the value of the legend's first level\n\n .. py:attribute:: level_2 (float)\n\n Sets the value of the legend's end level\n\"\"\"\n\nlevels_attr = \"\"\"\n .. py:attribute:: levels ([float,...]/[[float,float],...])\n\n Sets the levels range to use, can be either a list of contiguous levels, or list of tuples\n indicating first and last value of the range.\n\"\"\"\n\nmissing_attr = \"\"\"\n .. py:attribute:: missing (int)\n\n Color to use for missing value or values not in defined ranges\n\"\"\"\n\nmeshfill_doc = \"\"\"\n %s\n %s\n %s\n %s\n %s\n %s\n\"\"\" % (levels_attr, fillarea_colors_attr, fillarea_attrs, legend_attr, exts_attrs, missing_attr)\n\n\nisofill_doc = meshfill_doc\n\nfillareadoc = \"\"\"\n fillareacolor :: (int) (None) color to use for outfilling\n fillareastyle :: (str) ('solid') style to use for levels filling: solid/pattenr/hatch\n fillareaindex :: (int) (None) pattern to use when filling a level and using pattern/hatch\n\"\"\" # noqa\n\nlinesdoc = \"\"\" line :: ([str,...]/[vcs.line.Tl,...]/[int,...]) (['solid',]) line type to use for each isoline, can also pass a line object or line object name\n linecolors :: ([int,...]) ([241]) colors to use for each isoline\n linewidths :: ([float,...]) ([1.0]) list of width for each isoline\n \"\"\" # noqa\nlinedoc = \"\"\" line :: ([str,...]/[vcs.line.Tl,...]/[int,...]) (['solid',]) line type to use for each isoline, can also pass a line object or line object name\n linecolor :: (int) (241) colors to use for each isoline\n linewidth :: (float) (1.0) list of width for each isoline\n \"\"\" # noqa\n\ntextsdoc = \"\"\"\n text :: (None/[vcs.textcombined.Tc,...]) (None) text objects or text objects names to use for each countour labels\n textcolors :: (None/[int,...]) (None) colors to use for each countour labels\n\"\"\" # noqa\n\nmarkerdoc = \"\"\"\n marker :: (None/int/str/vcs.marker.Tm) (None) markers type to use\n markercolor :: (None/int) (None) color to use for markers\n markersize :: (None/int) (None) size of markers\n\"\"\"\n\n#############################################################################\n# #\n# Graphics Method input section. #\n# #\n#############################################################################\n\ncreate_GM_input = \"\"\"\n:param new_GM_name: (Ex: 'my_awesome_gm') name of the new graphics method object. If no name is given, then one will be created for use.\n:type new_GM_name: str\n:param source_GM_name: (Ex: 'default') copy the contents of the source object to the newly created one. 
If no name is given, then the 'default' graphics methond contents is copied over to the new object.\n:type source_GM_name: str\n\"\"\" # noqa\n\nget_GM_input = \"\"\"\n:param GM_name: (Ex: 'default') retrieve the graphics method object of the given name. If no name is given, then retrieve the 'default' graphics method.\n:type GM_name: str\n\"\"\" # noqa\n\nplot_1D_input = \"\"\"\n:param slab: (Ex: [1, 2]) Data at least 1D, last dimension will be plotted\n:type slab: array\n\"\"\" # noqa\n\nplot_2D_input = \"\"\"\n:param slab: (Ex: [[0, 1]]) Data at least 2D, last 2 dimensions will be plotted\n:type slab: array\n\"\"\" # noqa\n\nplot_2_1D_input = \"\"\"\n:param slab_or_primary_object: Data at least 1D, last dimension(s) will be plotted, or secondary vcs object\n:type slab_or_primary_object: array\n\"\"\" # noqa\nplot_2_1D_options = \"\"\"\n:param slab2: Data at least 1D, last dimension(s) will be plotted\n:param template: ('default') vcs template to use\n:param gm: (Ex: 'default') graphic method to use\n:type slab2: array\n:type template: str/vcs.template.P\n:type gm: VCS graphics method object\n\"\"\" # noqa\n#############################################################################\n# #\n# Graphics Method output section. #\n# #\n#############################################################################\nplot_output = \"\"\"\n:return: Display Plot object representing the plot.\n:rtype: vcs.displayplot.Dp\n\"\"\"\n\nboxfill_output = \"\"\"\n boxfill :: (Ex: 0) no default\n\"\"\"\n\nisofill_output = \"\"\"\n isofill :: (Ex: 0) no default\n\"\"\"\n\nisoline_output = \"\"\"\n isoline :: (Ex: 0) no default\n\"\"\"\n\nyxvsx_output = \"\"\"\n yxvsx :: (Ex: 0) no default\n\"\"\"\n\nxyvsy_output = \"\"\"\n xyvsy :: (Ex: 0) no default\n\"\"\"\n\nxvsy_output = \"\"\"\n xvsy :: (Ex: 0) no default\n\"\"\"\n\nscatter_output = \"\"\"\n scatter :: (Ex: 0) no default\n\"\"\"\n\noutfill_output = \"\"\"\n outfill :: (Ex: 0) no default\n\"\"\"\n\noutline_output = \"\"\"\n outline :: (Ex: 0) no default\n\"\"\"\n" }, { "alpha_fraction": 0.5562157034873962, "alphanum_fraction": 0.567646861076355, "avg_line_length": 24.71953582763672, "blob_id": "a4ff2ad7b2560b9e8e00380678df47ae469b1d33", "content_id": "6618227c9edc5737bdfb81576a155caa4f2a1c4b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13297, "license_type": "no_license", "max_line_length": 119, "num_lines": 517, "path": "/vcs/queries.py", "repo_name": "embrown/vcs", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#\n# The VCS query controls - query module\n#\n##########################################################################\n# #\n# Module: query module #\n# #\n# Copyright: \"See file Legal.htm for copyright information.\" #\n# #\n# Authors: PCMDI Software Team #\n# Lawrence Livermore NationalLaboratory: #\n# [email protected] #\n# #\n# Description: Functions which get information about vcs graphics objects #\n# such as graphics methods and templates. #\n# #\n# Version: 4.0 #\n# #\n##########################################################################\n\n\"\"\"\n###########################################################################################\n# #\n# Functions which get information about vcs graphics objects such as graphics methods. 
#\n# #\n###########################################################################################\n\"\"\"\nimport boxfill\nimport isofill\nimport isoline\nimport taylor\nimport meshfill\nimport unified1D\nimport vector\nimport line\nimport marker\nimport fillarea\nimport texttable\nimport textorientation\nimport textcombined\nimport template\nimport dv3d\nimport displayplot\nimport projection\nimport vcs\nimport xmldocs\n\nfrom error import vcsError\n\n\ndef isgraphicsmethod(gobj):\n \"\"\"\n Indicates if the entered argument is one of the following graphics\n methods: boxfill, isofill, isoline,\n scatter, vector, xvsy, xyvsy, yxvsx.\n\n :Example:\n\n::\n\n a=vcs.init()\n # To Modify an existing boxfill use:\n box=a.getboxfill('quick')\n #...\n if vcs.isgraphicsmethod(box):\n box.list()\n\n:param gobj: A graphics object\n:type gobj: A VCS graphics object\n\n:returns: Integer reperesenting whether gobj is one of the above graphics methods. 1 indicates true, 0 indicates false.\n:rtype:\n\"\"\"\n import vcsaddons\n if (isinstance(gobj, boxfill.Gfb)):\n return 1\n elif (isinstance(gobj, isofill.Gfi)):\n return 1\n elif (isinstance(gobj, dv3d.Gf3Dscalar)):\n return 1\n elif (isinstance(gobj, dv3d.Gf3DDualScalar)):\n return 1\n elif (isinstance(gobj, dv3d.Gf3Dvector)):\n return 1\n elif (isinstance(gobj, isoline.Gi)):\n return 1\n elif (isinstance(gobj, vector.Gv)):\n return 1\n elif (isinstance(gobj, unified1D.G1d)):\n return 1\n elif (isinstance(gobj, taylor.Gtd)):\n return 1\n elif (isinstance(gobj, meshfill.Gfm)):\n return 1\n elif isinstance(gobj, vcsaddons.core.VCSaddon):\n return 1\n else:\n return 0\n\n\ndef graphicsmethodlist():\n \"\"\"\n List available graphics methods.\n\n :Example:\n\n::\n\n a=vcs.init()\n # Return graphics method list\n gm_list=a.graphicsmethodlist()\n\n:returns: A list of available grapics methods (i.e., boxfill, isofill, isoline, outfill,\n scatter, vector, xvsy, xyvsy, yxvsx, taylordiagram ).\n:rtype: list\n\"\"\"\n return ['boxfill', 'isofill', 'isoline', 'meshfill', 'scatter',\n 'vector', 'xvsy', 'xyvsy', 'yxvsx', 'taylordiagram', '1d', '3d_scalar', '3d_dual_scalar', '3d_vector']\n\n\ndef graphicsmethodtype(gobj):\n \"\"\"\n Check the type of a graphics object.\n\n Returns a None if the object is not a graphics method.\n\n :Example:\n\n::\n\n a=vcs.init()\n # Get an existing boxfill graphics method in VCS\n box=a.getboxfill('quick')\n # Get an existing isofill graphics method in VCS\n iso=a.getisofill('quick')\n # Get an existing line element in VCS\n ln=a.getline('quick')\n #...\n # Will print 'boxfill'\n print vcs.graphicsmethodtype(box)\n # Will print 'isofill'\n print vcs.graphicsmethodtype(iso)\n # Will print None, because ln is not a graphics method\n print vcs.graphicsmethodtype(ln)\n\n:returns: If gobj is a graphics method object, returns its type: 'boxfill', 'isofill', 'isoline',\n 'scatter', 'vector', 'xvsy', 'xyvsy', or 'yxvsx', 'taylordiagram'.\n If gobj is not a graphics method object, returns None.\n:rtype: str or None\n \"\"\"\n import vcsaddons\n if (isinstance(gobj, boxfill.Gfb)):\n return 'boxfill'\n elif (isinstance(gobj, isofill.Gfi)):\n return 'isofill'\n elif (isinstance(gobj, dv3d.Gf3Dscalar)):\n return '3d_scalar'\n elif (isinstance(gobj, dv3d.Gf3DDualScalar)):\n return '3d_dual_scalar'\n elif (isinstance(gobj, dv3d.Gf3Dvector)):\n return '3d_vector'\n elif (isinstance(gobj, isoline.Gi)):\n return 'isoline'\n elif (isinstance(gobj, vector.Gv)):\n return 'vector'\n elif (isinstance(gobj, unified1D.G1d)):\n return \"1d\"\n elif 
(isinstance(gobj, taylor.Gtd)):\n return 'taylordiagram'\n elif (isinstance(gobj, meshfill.Gfm)):\n return 'meshfill'\n elif isinstance(gobj, vcsaddons.core.VCSaddon):\n return gobj\n else:\n raise vcsError('The object passed is not a graphics method object.')\n\n\ndef isplot(pobj):\n \"\"\"\n Check to see if this object is a VCS secondary display plot.\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all available display\n a.show('display')\n # To test an existing display object\n test_obj = a.getdisplay('default')\n # ...\n if queries.isdisplay(test_obj):\n test_obj.list()\n\n:param obj: A VCS object\n:type obj: VCS Object\n\n:returns: An integer indicating whether the object is a display plot (1), or not (0).\n:rtype: int\n\"\"\"\n if (isinstance(pobj, displayplot.Dp)):\n return 1\n else:\n return 0\n\n\ndef iscolormap(obj):\n \"\"\"\n Check to see if this object is a VCS secondary colormap.\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all available colormap\n a.show('colormap')\n # To test an existing colormap object\n test_obj = a.getcolormap('default')\n # ...\n if queries.iscolormap(test_obj):\n test_obj.list()\n\n:param obj: A VCS object\n:type obj: VCS Object\n\n:returns: An integer indicating whether the object is a colormap (1), or not (0).\n:rtype: int\n\"\"\"\n if (isinstance(obj, vcs.colormap.Cp)):\n return 1\n else:\n return 0\n\n\ndef istemplate(gobj):\n \"\"\"\n Check to see if this object is a template.\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all available template\n a.show('template')\n # To test an existing template object\n test_obj = a.gettemplate('default')\n # ...\n if queries.istemplate(test_obj):\n test_obj.list()\n\n:param obj: A VCS object\n:type obj: VCS Object\n\n:returns: An integer indicating whether the object is a template (1), or not (0)\n:rtype: int\n\"\"\"\n if (isinstance(gobj, template.P)):\n return 1\n else:\n return 0\n\n\ndef issecondaryobject(sobj):\n \"\"\"\n Check to see if this object is a VCS secondary object\n\n .. note::\n Secondary objects will be one of the following:\n 1.) colormap: specification of combinations of 256 available\n colors\n 2.) fill area: style, style index, and color index\n 3.) format: specifications for converting numbers to display\n strings\n 4.) line: line type, width, and color index\n 5.) list: a sequence of pairs of numerical and character values\n 6.) marker: marker type, size, and color index\n 7.) text table: text font type, character spacing, expansion, and\n color index\n 8.) text orientation: character height, angle, path, and\n horizontal/vertical alignment\n 9.) 
projections\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all available lines\n a.show('line')\n # To test an existing line object\n test_obj = a.getprojection('default')\n # ...\n if queries.issecondaryobject(test_obj):\n test_obj.list()\n\n:param obj: A VCS object\n:type obj: VCS Object\n\n:returns: An integer indicating whether the object is a projection graphics object (1), or not (0).\n:rtype: int\n\"\"\"\n if (isinstance(sobj, line.Tl)):\n return 1\n elif (isinstance(sobj, marker.Tm)):\n return 1\n elif (isinstance(sobj, fillarea.Tf)):\n return 1\n elif (isinstance(sobj, texttable.Tt)):\n return 1\n elif (isinstance(sobj, textorientation.To)):\n return 1\n elif (isinstance(sobj, textcombined.Tc)):\n return 1\n elif (isinstance(sobj, marker.Tm)):\n return 1\n elif (isinstance(sobj, projection.Proj)):\n return 1\n elif (isinstance(sobj, vcs.colormap.Cp)):\n return 1\n else:\n return 0\n\n\ndef isprojection(obj):\n \"\"\"\n Check to see if this object is a VCS secondary projection graphics object.\n\n :Example:\n\n::\n\n a=vcs.init()\n # Show all available projection\n a.show('projection')\n # To test an existing projection object\n test_obj = a.getprojection('default')\n # ...\n if queries.isprojection(test_obj):\n test_obj.list()\n\n:param obj: A VCS object\n:type obj: VCS Object\n\n:returns: An integer indicating whether the object is a projection graphics object (1), or not (0).\n:rtype: int\n\"\"\"\n if (isinstance(obj, projection.Proj)):\n return 1\n else:\n return 0\n\n\ndef istaylordiagram(obj):\n if (isinstance(obj, taylor.Gtd)):\n return 1\n else:\n return 0\nistaylordiagram.__doc__ = xmldocs.istaylordiagram_doc\n\n\ndef ismeshfill(obj):\n if (isinstance(obj, meshfill.Gfm)):\n return 1\n else:\n return 0\nismeshfill.__doc__ = xmldocs.ismeshfill_doc\n\n\ndef isboxfill(obj):\n if (isinstance(obj, boxfill.Gfb)):\n return 1\n else:\n return 0\nisboxfill.__doc__ = xmldocs.isboxfill_doc\n\n\ndef is3d_scalar(obj):\n if (isinstance(obj, dv3d.Gf3Dscalar) or isinstance(obj, dv3d.Gf3DDualScalar)):\n return 1\n else:\n return 0\nis3d_scalar.__doc__ = xmldocs.is3d_scalar_doc\n\n\ndef is3d_dual_scalar(obj):\n if isinstance(obj, dv3d.Gf3DDualScalar):\n return 1\n else:\n return 0\nis3d_dual_scalar.__doc__ = xmldocs.is3d_dual_scalar_doc\n\n\ndef is3d_vector(obj):\n if (isinstance(obj, dv3d.Gf3Dvector)):\n return 1\n else:\n return 0\nis3d_vector.__doc__ = xmldocs.is3d_vector_doc\n\n\ndef isisofill(obj):\n if (isinstance(obj, isofill.Gfi)):\n return 1\n else:\n return 0\nisisofill.__doc__ = xmldocs.isisofill_doc\n\n\ndef isisoline(obj):\n if (isinstance(obj, isoline.Gi)):\n return 1\n else:\n return 0\nisisoline.__doc__ = xmldocs.isisoline_doc\n\n\ndef isscatter(obj):\n if (isinstance(obj, unified1D.G1d)) and obj.g_type == \"scatter\":\n return 1\n else:\n return 0\nisscatter.__doc__ = xmldocs.isscatter_doc\n\n\ndef isxyvsy(obj):\n if (isinstance(obj, unified1D.G1d)) and obj.g_type == \"xyvsy\":\n return 1\n else:\n return 0\nisxyvsy.__doc__ = xmldocs.isxyvsy_doc\n\n\ndef isyxvsx(obj):\n if (isinstance(obj, unified1D.G1d)) and obj.g_type == \"yxvsx\":\n return 1\n else:\n return 0\nisyxvsx.__doc__ = xmldocs.isyxvsx_doc\n\n\ndef isxvsy(obj):\n if (isinstance(obj, unified1D.G1d)) and obj.g_type == \"yxvsx\":\n return 1\n else:\n return 0\nisxvsy.__doc__ = xmldocs.isxvsy_doc\n\n\ndef is1d(obj):\n if (isinstance(obj, unified1D.G1d)):\n return 1\n else:\n return 0\nis1d.__doc__ = xmldocs.is1d_doc\n\n\ndef isvector(obj):\n if (isinstance(obj, vector.Gv)):\n return 1\n else:\n return 
0\nisvector.__doc__ = xmldocs.isvector_doc\n\n\ndef isline(obj):\n if (isinstance(obj, line.Tl)):\n return 1\n else:\n return 0\nisline.__doc__ = xmldocs.isline_doc\n\n\ndef ismarker(obj):\n if (isinstance(obj, marker.Tm)):\n return 1\n else:\n return 0\nismarker.__doc__ = xmldocs.ismarker_doc\n\n\ndef isfillarea(obj):\n if (isinstance(obj, fillarea.Tf)):\n return 1\n else:\n return 0\nisfillarea.__doc__ = xmldocs.isfillarea_doc\n\n\ndef istexttable(obj):\n if (isinstance(obj, texttable.Tt)):\n return 1\n else:\n return 0\nistexttable.__doc__ = xmldocs.istexttable_doc\n\n\ndef istextorientation(obj):\n if (isinstance(obj, textorientation.To)):\n return 1\n else:\n return 0\nistextorientation.__doc__ = xmldocs.istextorientation_doc\n\n\ndef istextcombined(obj):\n if (isinstance(obj, textcombined.Tc)):\n return 1\n else:\n return 0\nistextcombined.__doc__ = xmldocs.istextcombined_doc\n\n# Set an alias for the secondary text combined method in VCS. #\n# This is much easier to type than 'textcombined'. #\nistext = istextcombined\n" } ]
3
houjingyi233/ALPC-fuzz-study
https://github.com/houjingyi233/ALPC-fuzz-study
2f3a26c5e80b9c80503fc18e4cc854d7630a19bf
73849130bf3c1cfdd0331886d053f06cc347f733
f50d177d8482607b28255ce39fe6525e78e7ecbe
refs/heads/master
2020-04-15T16:43:45.081965
2019-01-10T02:31:30
2019-01-10T02:31:30
164,848,177
23
11
null
null
null
null
null
[ { "alpha_fraction": 0.6293103694915771, "alphanum_fraction": 0.8103448152542114, "avg_line_length": 37.66666793823242, "blob_id": "ebe0c1b7a568d6bedcb6eb15a758b9c71720ed0c", "content_id": "ce9761f4be00010dae2cf19df7ee25b1af6acb8c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 116, "license_type": "no_license", "max_line_length": 96, "num_lines": 3, "path": "/README.md", "repo_name": "houjingyi233/ALPC-fuzz-study", "src_encoding": "UTF-8", "text": "# ALPC-fuzz-study\n\nYou can find some notes on:https://cert.360.cn/report/detail?id=44669690fc7a8daab42472cebd8cfb88\n" }, { "alpha_fraction": 0.35985633730888367, "alphanum_fraction": 0.36579638719558716, "avg_line_length": 42.873966217041016, "blob_id": "f571a90e6ec67f742f60c18240d3d5c8bd72fb17", "content_id": "74d4a532e50e2055922bb064f2bc7d7df09b51c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 21717, "license_type": "no_license", "max_line_length": 222, "num_lines": 484, "path": "/transfer.py", "repo_name": "houjingyi233/ALPC-fuzz-study", "src_encoding": "UTF-8", "text": "import re\r\nimport os\r\ndone = 0\r\n\r\ndef get_all_py():\r\n for root,dirs,files in os.walk(\"D:\\\\ALPC-FUZZ\\\\interfaces\"):\r\n myfiles=[]\r\n for file in files:\r\n if os.path.splitext(file)[1] == '.py' and file != 'transfer.py':\r\n myfiles.append(file)\r\n return myfiles\r\n\r\n# first check if the decompile results are right\r\ndef check_decompile(file):\r\n f = open(file, \"r+\")\r\n while True:\r\n str = f.readline()\r\n # symbol failed to load,go to next file\r\n if not str:\r\n f.close()\r\n return 0\r\n matchobj = re.search(r'Proc(.*)_(.*)',str)\r\n # symbol successfully loaded,we can transfer this file\r\n if matchobj:\r\n f.close()\r\n return 1\r\n\r\ndef transfer(file):\r\n done = 0\r\n FirstFunction = 1\r\n\r\n fold = open(file, \"r+\")\r\n fnew = open(\"D:\\\\ALPC-FUZZ\\\\transfer\\\\\"+file,\"w+\")\r\n finterfaces = open(\"D:\\\\ALPC-fuzz\\\\interfaces\\\\Endpoints.txt\", \"r+\")\r\n\r\n # print \"now transfer \"+file\r\n interface_name=file[:-3]\r\n\r\n # we need this for output topological sort\r\n struct_str = {}\r\n struct_dict = {}\r\n\r\n while not done:\r\n str = fold.readline()\r\n if(str != \"\"):\r\n\r\n contain_array = []\r\n # deal with structure like\"typedef struct Struct_x_t\"\r\n matchobj = re.search(r'typedef(.*)Struct_(?P<count>(.*))_t', str)\r\n if matchobj:\r\n str = fold.readline()\r\n struct_name = \"Struct_\"+matchobj.groupdict()['count']\r\n struct_str[struct_name] = \"class Struct_\" + matchobj.groupdict()['count'] + \"_t(NdrStructure):\\n\"\r\n struct_str[struct_name] += \" MEMBERS = [\"\r\n\r\n while(str.find(\"}\") == -1):\r\n str = fold.readline()\r\n\r\n write_str = \"\"\r\n\r\n #deal with size_is\r\n matchobj = re.search(r'(?P<SizeIs>(size_is\\(StructMember(?P<SizeCount>(\\d*))\\)\\]))',str)\r\n if matchobj:\r\n write_str += \"SizeIs(\"\r\n write_str += matchobj.groupdict()['SizeCount']\r\n write_str += \")/\"\r\n\r\n #deal with case like \"xxx StructMemberx[xxx]\"\r\n #deal with case like \"xxx StructMemberx[xxx][xxx]\"\r\n matchobj = re.search(r'(?P<type>(small|char|long|byte|wchar_t|short|hyper|struct(.*)Struct_(?P<structcount>(.*))_t))(.*)StructMember(\\d*)(\\[(?P<arraycount1>(\\d*))\\])?(\\[(?P<arraycount2>(\\d*))\\])?', str)\r\n if matchobj:\r\n if matchobj.groupdict()['type'] == \"long\":\r\n write_str += \"NdrLong, \"\r\n if matchobj.groupdict()['type'] == \"small\":\r\n write_str += 
\"NdrSmall, \"\r\n #just treat char as byte\r\n if matchobj.groupdict()['type'] == \"byte\" or matchobj.groupdict()['type'] == \"char\":\r\n write_str += \"NdrByte, \"\r\n if matchobj.groupdict()['type'] == \"wchar_t\":\r\n if str.find(\"[string]\") != -1:\r\n write_str += \"NdrWString, \"\r\n else:\r\n write_str += \"NdrWChar, \"\r\n if matchobj.groupdict()['type'] == \"short\":\r\n write_str += \"NdrShort, \"\r\n if matchobj.groupdict()['type'] == \"hyper\":\r\n write_str += \"NdrHyper, \"\r\n if matchobj.groupdict()['type'].find(\"struct\") != -1:\r\n write_str += \"Struct_\" + matchobj.groupdict()['structcount'] + \"_t, \"\r\n contain_name = 'Struct_' + matchobj.groupdict()['structcount']\r\n if contain_name not in contain_array:\r\n contain_array.append(contain_name)\r\n\r\n if matchobj.groupdict()['arraycount1'] != None and matchobj.groupdict()['arraycount1'] != \"\":\r\n if matchobj.groupdict()['arraycount2'] != None and matchobj.groupdict()['arraycount2'] != \"\":\r\n number = int(matchobj.groupdict()['arraycount1'])*int(matchobj.groupdict()['arraycount2'])\r\n else:\r\n number = int(matchobj.groupdict()['arraycount1'])\r\n while number:\r\n struct_str[struct_name] += write_str\r\n number = number - 1\r\n else:\r\n struct_str[struct_name] += write_str\r\n\r\n #end\r\n matchobj = re.search(r'}Struct_(.*)_t',str)\r\n if matchobj:\r\n struct_str[struct_name] += \"]\\n\\n\"\r\n struct_dict[struct_name] = contain_array\r\n\r\n # deal with union like\"class Union_x_t(NdrUnion)\"\r\n matchobj = re.search(r'typedef(.*)\\[switch_type(?P<type>(.*))\\](.*)union(.*)union_(?P<count>(.*))', str)\r\n if matchobj:\r\n union_name = \"Union_\" + matchobj.groupdict()['count']\r\n struct_str[union_name] = \"class Union_\" + matchobj.groupdict()['count'] + \"_t(NdrUnion):\\n\"\r\n struct_str[union_name] += \" SWITCHTYPE = \"\r\n\r\n if matchobj.groupdict()['type'].find(\"double\") != -1 \\\r\n or matchobj.groupdict()['type'].find(\"float\") != -1 \\\r\n or matchobj.groupdict()['type'].find(\"long\") != -1 \\\r\n or matchobj.groupdict()['type'].find(\"__int3264\") != -1:\r\n struct_str[union_name] += \"NdrLong\\n\"\r\n\r\n if matchobj.groupdict()['type'].find(\"short\") != -1:\r\n struct_str[union_name] += \"NdrShort\\n\"\r\n\r\n if matchobj.groupdict()['type'].find(\"small\") != -1:\r\n struct_str[union_name] += \"NdrSmall\\n\"\r\n\r\n struct_str[union_name] += \" MEMBERS = {\"\r\n\r\n str = fold.readline()\r\n\r\n while(str.find(\"}\") == -1):\r\n\r\n isunique = 0\r\n str = fold.readline()\r\n\r\n if str.find(\"no default member\") != -1:\r\n continue\r\n if str.find(\"An exception will\") != -1:\r\n continue\r\n\r\n if str.find(\"unique\") != -1:\r\n isunique = 1\r\n\r\n #ignore default\r\n matchobj = re.search(r'\\[default\\]', str)\r\n if matchobj:\r\n continue\r\n\r\n #I really do not know how to deal with cases like\"[case(x)] [unique]interface(xxxxxxxx-xxxx-xxxx-xxxxxxxxxx)* unionMember_x\"\r\n matchobj = re.search(r'interface', str)\r\n if matchobj:\r\n continue\r\n\r\n #make sure the serial number\r\n matchobj = re.search(r'\\[case\\((?P<number>(\\d*))\\)\\](.*)', str)\r\n if matchobj:\r\n struct_str[union_name] += matchobj.groupdict()['number']\r\n struct_str[union_name] += \": \"\r\n\r\n #deal with case like\"[case(x)] [unique]struct Struct_x_t* unionMember_x\"\r\n matchobj = re.search(r'struct(.*)Struct_(?P<count>(.*))_t', str)\r\n if matchobj:\r\n if isunique:\r\n struct_str[union_name] += 'NdrUniquePTR(Struct_'+matchobj.groupdict()['count']+'_t), '\r\n else:\r\n struct_str[union_name] += 
'NdrPtr(Struct_' + matchobj.groupdict()['count'] + '_t), '\r\n contain_name='Struct_' + matchobj.groupdict()['count']\r\n if contain_name not in contain_array:\r\n contain_array.append(contain_name)\r\n\r\n #deal with case like\"[case(x)]\thyper unionMember_x\"\r\n if str.find(\"hyper\") != -1:\r\n if isunique:\r\n struct_str[union_name] += 'NdrUniquePTR(NdrHyper), '\r\n else:\r\n struct_str[union_name] += 'NdrHyper, '\r\n\r\n #deal with case like\"[case(x)] float|double|long|__int3264 unionMember_x\"\r\n if str.find(\"double\") != -1 or str.find(\"float\") != -1 or str.find(\"long\") != -1 or str.find(\"__int3264\") != -1:\r\n if isunique:\r\n struct_str[union_name] += 'NdrUniquePTR(NdrLong), '\r\n else:\r\n struct_str[union_name] += 'NdrLong, '\r\n\r\n #deal with case like\"[case(x)] short unionMember_x\"\r\n if str.find(\"short\") != -1:\r\n if isunique:\r\n struct_str[union_name] += 'NdrUniquePTR(NdrShort), '\r\n else:\r\n struct_str[union_name] += 'NdrShort, '\r\n\r\n #deal with case like\"[case(x)] /* FC_ZERO */\"\r\n #I really do not know how to deal with this\r\n if str.find(\"FC_ZERO\") != -1:\r\n struct_str[union_name] += '0, '\r\n\r\n #deal with case like\"[case(x)] small unionMember_x\"\r\n if str.find(\"small\") != -1:\r\n if isunique:\r\n struct_str[union_name] += 'NdrUniquePTR(NdrSmall), '\r\n else:\r\n struct_str[union_name] += 'NdrSmall, '\r\n\r\n #deal with case like\"[case(x)]\tbyte unionMember_x\"\r\n if str.find(\"byte\") != -1:\r\n if isunique:\r\n struct_str[union_name] += 'NdrUniquePTR(NdrByte), '\r\n else:\r\n struct_str[union_name] += 'NdrByte, '\r\n\r\n #deal with case like\"[case(x)] [unique][string] wchar_t** unionMember_x\"\r\n matchobj = re.search(r'\\[unique\\]\\[string\\](.*)wchar_t', str)\r\n if matchobj:\r\n struct_str[union_name] += 'NdrUniquePTR(NdrWString), '\r\n else:\r\n #deal with case like\"[case(x)] [unique]char *unionMember_x \"\r\n if str.find(\"char\") != -1:\r\n if isunique:\r\n struct_str[union_name] += 'NdrUniquePTR(NdrWChar), '\r\n else:\r\n struct_str[union_name] += 'NdrWChar, '\r\n\r\n #end\r\n matchobj = re.search(r'\\}(.*)union_(.*)', str)\r\n if matchobj:\r\n struct_str[union_name] += \"}\\n\\n\"\r\n struct_dict[union_name] = contain_array\r\n break\r\n\r\n # prase function\r\n matchobj = re.search(r'Proc(.*?)_(?P<function>(.*))\\(', str)\r\n\r\n if matchobj:\r\n\r\n # I do not how,there are still many structures used in function not defined\r\n # I think I just have to ignore these functions\r\n pos = fold.tell()\r\n str_temp = fold.readline()\r\n flag1 = 0\r\n \r\n while str_temp.find(\"Proc\") == -1 and str_temp.find(\"}\") == -1:\r\n\r\n matchobj_temp = re.search(r'\\[in\\](.*)struct(.*)Struct_(?P<count>(.*))_t', str_temp)\r\n\r\n if matchobj_temp:\r\n struct_str_temp = \"Struct_\"+matchobj_temp.groupdict()['count']\r\n if struct_str.has_key(struct_str_temp) == False:\r\n flag1 = 1\r\n break\r\n\r\n str_temp = fold.readline()\r\n\r\n fold.seek(pos, 0)\r\n\r\n # we have to define structure and union after we read them all\r\n if FirstFunction:\r\n fnew.write(\"from rpc_forge import *\\n\")\r\n while True:\r\n flag2 = 0\r\n for item in struct_dict:\r\n\r\n if struct_dict[item] == [] or cmp(\"\".join(struct_dict[item]),item) == 0:\r\n fnew.write(struct_str[item])\r\n flag2 = 1\r\n struct_dict[item] = \"NULL\"\r\n for items in struct_dict:\r\n if item in struct_dict[items]:\r\n struct_dict[items].remove(item)\r\n\r\n if flag2 == 0:\r\n break\r\n\r\n fnew.write(\"interface = Interface(\\\"\"+interface_name+\"\\\", (1,0), [\\n\")\r\n\r\n 
FirstFunction = 0\r\n\r\n if flag1 == 0:\r\n\r\n fnew.write(\"Method(\\\"\" + matchobj.groupdict()[\"function\"] + \"\\\", 1,\\n\")\r\n \r\n IsFirstTime = 1\r\n \r\n while True:\r\n \r\n In = 0\r\n Out = 0\r\n \r\n brackets = 0\r\n \r\n # IsRange = 0\r\n \r\n IsRef = 0\r\n IsChar = 0\r\n IsLong = 0\r\n IsSize = 0\r\n IsHyper = 0\r\n IsShort = 0\r\n IsSmall = 0\r\n IsSwitch = 0\r\n IsUnique = 0\r\n IsStruct = 0\r\n IsString = 0\r\n IsContext = 0\r\n \r\n SizeCount = 0\r\n UnionCount = 0\r\n StructCount = 0\r\n SwitchCount = 0\r\n \r\n str = fold.readline()\r\n \r\n matchobj = re.search(r'\\[(?P<IsIn>(in))\\]',str)\r\n if matchobj:\r\n In = 1\r\n \r\n # we just ignore out put\r\n matchobj = re.search(r'\\[(?P<IsOut>(out))\\]', str)\r\n if matchobj and In == 0:\r\n continue\r\n \r\n matchobj = re.search(r'\\[(?P<IsRef>(ref))\\]', str)\r\n if matchobj:\r\n IsRef = 1\r\n \r\n matchobj = re.search(r'(?P<IsChar>(char|byte))', str)\r\n if matchobj:\r\n IsChar = 1\r\n \r\n matchobj = re.search(r'(?P<IsLong>(double|float|long|__int3264))', str)\r\n if matchobj:\r\n IsLong = 1\r\n \r\n matchobj = re.search(r'(?P<IsShort>(short))', str)\r\n if matchobj:\r\n IsShort = 1\r\n \r\n matchobj = re.search(r'(?P<IsHyper>(hyper))', str)\r\n if matchobj:\r\n IsHyper = 1\r\n \r\n matchobj = re.search(r'(?P<IsSmall>(small))', str)\r\n if matchobj:\r\n IsSmall = 1\r\n \r\n matchobj = re.search(r'\\[(?P<IsSize>(size_is\\(arg_(?P<sizecount>(\\d*))\\)))\\]', str)\r\n if matchobj:\r\n IsSize = 1\r\n SizeStr = matchobj.groupdict()[\"IsSize\"]\r\n SizeCount = matchobj.groupdict()[\"sizecount\"]\r\n \r\n # I want to ignore problem about range\r\n '''\r\n matchobj = re.search(r'\\[(?P<IsRange>(range(.*)))\\]', str)\r\n if matchobj:\r\n IsRange = 1\r\n RangeStr = matchobj.groupdict()[\"IsRange\"]\r\n \r\n #this means it is actually commented\r\n matchobj = re.search(r'/\\*\\[(?P<IsRange>(range(.*)))\\]\\*/', str)\r\n if matchobj:\r\n IsRange = 0\r\n '''\r\n \r\n matchobj = re.search(r'\\[(?P<IsSwitch>(switch_is\\(arg_(?P<switchcount>(.*))\\)))\\]', str)\r\n if matchobj:\r\n IsSwitch = 1\r\n SwitchCount = matchobj.groupdict()[\"switchcount\"]\r\n matchobj = re.search(r'(union union_(?P<unioncount>\\d*))', str)\r\n if matchobj:\r\n UnionCount = matchobj.groupdict()[\"unioncount\"]\r\n else:\r\n print \"sth seems wrong.\\n\"\r\n exit()\r\n \r\n matchobj = re.search(r'\\[(?P<IsUnique>(unique))\\]', str)\r\n if matchobj:\r\n IsUnique = 1\r\n \r\n matchobj = re.search(r'(?P<IsStruct>(struct Struct_(?P<count>(.*))_t))', str)\r\n if matchobj:\r\n IsStruct = 1\r\n StructCount = matchobj.groupdict()[\"count\"]\r\n \r\n matchobj = re.search(r'\\[(?P<IsString>(string))\\]', str)\r\n if matchobj:\r\n IsString = 1\r\n \r\n matchobj = re.search(r'\\[(?P<IsContext>(context_handle))\\]', str)\r\n if matchobj:\r\n IsContext = 1\r\n \r\n # no in and no out means this function is end\r\n if In == 0 and Out == 0:\r\n fnew.write(\"),\\n\")\r\n break\r\n \r\n if IsFirstTime == 0:\r\n fnew.write(\",\\n\")\r\n \r\n IsFirstTime = 0\r\n \r\n if In:\r\n fnew.write(\"In(\")\r\n brackets = brackets + 1\r\n \r\n if Out:\r\n fnew.write(\"Out(\")\r\n brackets = brackets + 1\r\n \r\n if IsRef:\r\n fnew.write(\"NdrRef(\")\r\n brackets = brackets + 1\r\n \r\n if IsUnique:\r\n fnew.write(\"NdrUniquePTR(\")\r\n brackets = brackets + 1\r\n \r\n '''\r\n if IsRange:\r\n fnew.write(RangeStr[0].upper()+RangeStr[1:]+\" / \")\r\n '''\r\n \r\n if IsSize:\r\n fnew.write(\"SizeIs(\"+SizeCount+\") / \")\r\n \r\n if IsChar:\r\n if IsString == 0:\r\n 
fnew.write(\"NdrByte\")\r\n \r\n if IsLong:\r\n fnew.write(\"NdrLong\")\r\n \r\n if IsShort:\r\n fnew.write(\"NdrShort\")\r\n \r\n if IsHyper:\r\n fnew.write(\"NdrHyper\")\r\n \r\n if IsSmall:\r\n fnew.write(\"NdrSmall\")\r\n \r\n if IsString:\r\n fnew.write(\"NdrWString\")\r\n \r\n if IsSwitch:\r\n fnew.write(\"SwitchIs(\"+SwitchCount+\") / Union_\"+UnionCount+\"_t\")\r\n \r\n if IsContext:\r\n fnew.write(\"NdrContextHandle\")\r\n \r\n if IsStruct:\r\n fnew.write(\"Struct_\"+StructCount+\"_t\")\r\n \r\n while brackets:\r\n fnew.write(\")\")\r\n brackets = brackets-1\r\n\r\n fnew.write(\"\\n\")\r\n\r\n else:\r\n done = 1\r\n\r\n fnew.write(\"])\\n\\n\")\r\n fnew.write(\"interface.is_registered = True\\n\\n\")\r\n fnew.write(\"interface.endpoints = []\\n\")\r\n\r\n str = finterfaces.readline()\r\n str = str.rstrip()\r\n while (str != \"\"):\r\n fnew.write(\"interface.endpoints.append(\\\"\" + str + \"\\\")\\n\")\r\n str = finterfaces.readline()\r\n str = str.rstrip()\r\n fnew.close()\r\n finterfaces.close()\r\n\r\n\r\nif __name__ == '__main__':\r\n myfiles = get_all_py()\r\n for file in myfiles:\r\n if check_decompile(file):\r\n transfer(file)" } ]
2
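The script captured in the record above turns decompiled IDL listings into rpc_forge-style Python definitions by regex-matching each line (struct members, `[case(x)]` union members, function arguments). Below is a minimal, self-contained sketch of that matching step using only Python's `re` module; the sample input lines and the `union_member_entry` helper are illustrative inventions modeled on the regexes in the file, not code taken from the repository, and the sketch is written in Python 3 rather than the record's Python 2.

```python
import re

# Hypothetical sample lines, mirroring the decompiled-IDL format the script above expects.
SAMPLE_LINES = [
    "[case(3)] [unique]struct Struct_12_t* unionMember_3",
    "[case(7)] short unionMember_7",
]

# Regexes modeled on the ones the converter uses for the case number and struct-pointer members.
CASE_RE = re.compile(r'\[case\((?P<number>\d*)\)\]')
STRUCT_RE = re.compile(r'struct(.*)Struct_(?P<count>.*)_t')

def union_member_entry(line):
    """Return a 'serial: type' fragment like the generated MEMBERS dict uses, or None."""
    case = CASE_RE.search(line)
    if not case:
        return None
    unique = 'unique' in line
    struct = STRUCT_RE.search(line)
    if struct:
        member = f"Struct_{struct.group('count')}_t"
    elif 'short' in line:
        member = "NdrShort"
    else:
        member = "NdrLong"  # fallback for the long/float/double/__int3264 family
    if unique:
        member = f"NdrUniquePTR({member})"
    return f"{case.group('number')}: {member}"

for line in SAMPLE_LINES:
    print(union_member_entry(line))
# -> 3: NdrUniquePTR(Struct_12_t)
# -> 7: NdrShort
```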
Deeptradingfx/python-gymz
https://github.com/Deeptradingfx/python-gymz
aa27ab03b5c13890e2218693ef66724d3cade401
fdacdb40ff28bca5d849874fa7d8d9954b2ed683
2ac9c69babaf5e0ddd645c804cd1e5f6d8a752ea
refs/heads/master
2020-05-09T12:10:00.267410
2017-10-26T03:46:24
2017-10-26T03:46:24
181,104,051
1
0
null
2019-04-13T01:08:28
2018-12-24T03:54:25
2017-10-26T03:46:25
null
[ { "alpha_fraction": 0.6751301884651184, "alphanum_fraction": 0.6787109375, "avg_line_length": 31.680850982666016, "blob_id": "7b01114c39b1777e74bb1341c82adf2434b8fac6", "content_id": "35fc6d3347df836c7b16d740358ba9566d001e2f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3072, "license_type": "permissive", "max_line_length": 136, "num_lines": 94, "path": "/gymz-controller", "repo_name": "Deeptradingfx/python-gymz", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\ngymz-controller.\nStart threads for running an environment in an emulator and expose\ninput/output/reward buffers via ZeroMQ.\n\nUsage:\n gymz-controller <emulator> <config> [--verbosity VERBOSITY]\n gymz-controller -h | --help\n gymz-controller --version\n\nOptions:\n -h --help Show this screen.\n --version Show version.\n -v --verbosity Set logging verbosity level.\n\"\"\"\n\nimport docopt\nimport json\nimport logging\nimport os\nimport signal\nimport threading\nimport time\n\nimport gymz\n\n\ndef run(args):\n\n # set up logging\n if args['--verbosity']:\n logging.basicConfig(level=args['VERBOSITY'].upper())\n else:\n logging.basicConfig(level=logging.WARNING)\n\n # Load default configuration file\n config = gymz.misc.read_default_config()\n\n # Parse user config file and update config\n with open(args['<config>'], 'r') as f:\n gymz.misc.recursively_update_dict(config, json.load(f))\n\n # Set prefix (defaults to same directory as config)\n if config['All']['prefix'] is None:\n config['All']['prefix'] = os.path.split(args['<config>'])[0]\n\n # Create wrapper instance\n if args['<emulator>'] == 'gym':\n emu = gymz.GymWrapper(config)\n else:\n raise NotImplementedError('Unknown emulator.')\n\n # Load an environment\n emu.load_env(config['Env']['env'], monitor_args=config['Env']['monitor_args'])\n\n # Set random seed if given\n if config['All']['seed'] >= 0:\n emu.seed(config['All']['seed'])\n\n # Use mutable objects (lists) as containers for buffers to retain\n # correct pointers and avoid globals\n command_buffer = emu.get_command_buffer()\n output_buffer = emu.get_output_buffer()\n reward_buffer = emu.get_reward_buffer()\n done_buffer = emu.get_done_buffer()\n exit_event = threading.Event()\n\n # Create thread objects\n runner_thread = gymz.EnvRunnerThread(0, 'runner_thread', emu, command_buffer, output_buffer, reward_buffer, config, exit_event)\n zmq_reward_sender_thread = gymz.ZMQRewardSenderThread(1, 'zmq_reward_sender_thread', reward_buffer, done_buffer, config, exit_event)\n zmq_sender_thread = gymz.ZMQObservationSenderThread(2, 'zmq_sender_thread', output_buffer, done_buffer, config, exit_event)\n zmq_receiver_thread = gymz.ZMQCommandReceiverThread(3, 'zmq_receiver_thread', command_buffer, done_buffer, config, exit_event)\n\n # Set up signal handler for SIGINT to let all threads exit\n # gracefully if user wishes to exit\n signal.signal(signal.SIGINT, gymz.misc.SignalHandler(\n exit_event, [runner_thread, zmq_reward_sender_thread, zmq_sender_thread, zmq_receiver_thread]))\n\n # Fire up the threads, they will not terminate until SIGINT is received\n runner_thread.start()\n zmq_reward_sender_thread.start()\n zmq_sender_thread.start()\n zmq_receiver_thread.start()\n\n # Keep main thread alive to be able to receive SIGINT\n while True:\n time.sleep(1000)\n\n\nif __name__ == '__main__':\n run(docopt.docopt(__doc__, version=gymz.__version__))\n" }, { "alpha_fraction": 0.5376505851745605, "alphanum_fraction": 
0.5496987700462341, "avg_line_length": 28.511110305786133, "blob_id": "3da7ab8e9a6795357532f2b9e7f25aab4f4ce92f", "content_id": "83de8b45174ce8bb0b759d6ec8dd0528a4d09c11", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1328, "license_type": "permissive", "max_line_length": 78, "num_lines": 45, "path": "/setup.py", "repo_name": "Deeptradingfx/python-gymz", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport re\nfrom setuptools import setup\n\n# determine version from __init__.py without importing it\nwith open('./gymz/__init__.py', 'r') as f:\n for l in f:\n if '__version__' in l:\n try:\n version = re.compile('[0-9]+.[0-9]+.[0-9]+').search(l).group()\n except AttributeError:\n raise ValueError('Could not determine package version.')\n else:\n break\n\nsetup(\n name='gymz',\n version=version,\n author='Jakob Jordan, Philipp Weidel',\n author_email='[email protected]',\n description=('A light-weight ZeroMQ wrapper for the OpenAI Gym.'),\n license='MIT',\n keywords='openai-gym reinforcement-learning zeromq zmq',\n url='https://github.com/INM-6/python-gymz',\n packages=['gymz'],\n package_data={\n 'gymz': ['DefaultConfig.json']\n },\n scripts=['gymz-controller'],\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'License :: OSI Approved :: MIT License',\n 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.5',\n 'Topic :: Scientific/Engineering',\n ],\n install_requires=[\n 'docopt',\n 'gym>=0.8.1',\n 'numpy',\n 'pyzmq',\n ]\n)\n" }, { "alpha_fraction": 0.7463365197181702, "alphanum_fraction": 0.7518948912620544, "avg_line_length": 37.80392074584961, "blob_id": "07ea52698b11f837fd2bc1c53687008e0d636fea", "content_id": "b2ee3caa3af9ef4325636e579bfee97baabfc809", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1979, "license_type": "permissive", "max_line_length": 327, "num_lines": 51, "path": "/README.rst", "repo_name": "Deeptradingfx/python-gymz", "src_encoding": "UTF-8", "text": "gymz\n====\n\n|Python2.7| |License|\n\ngymz provides a light-weight wrapper for the `OpenAI Gym <https://gym.openai.com/>`__ to allow interaction with reinforcement-learning environments via `ZeroMQ <http://zeromq.org/>`__ sockets.\n\nThe wrapper consists of four different threads that coordinate\n\n1. performing steps in an environment\n2. receiving actions via a ZeroMQ SUB socket\n3. publishing observations via a ZeroMQ PUB socket\n4. publishing rewards via a ZeroMQ PUB socket\n\nIt was initially designed to be used in combination with `MUSIC <https://github.com/incf-music>`__ enabling online interaction between reinforcement learning environments from the OpenAI Gym and neuronal network models in simulators like `NEST <http://nest-simulator.org/>`__ or `NEURON <http://www.neuron.yale.edu/neuron/>`__.\n\nInstalling gymz\n---------------\n\ngymz is available via pip:\n\n.. code:: bash\n\n pip install gymz\n\nQuickstart\n----------\n\nAn example client is provided (``examples/random_gymz_client.py``) that connects to a running instance of the wrapper, sends random actions and prints observations and rewards received from the environment to the screen. From a terminal start the wrapper with the default configuration file:\n\n.. code:: bash\n\n gymz-controller gym DefaultConfig.json\n\nand the ``MountainCar-v0`` environment should be rendered on the screen. 
Afterwards start the client with:\n\n.. code:: bash\n\n python random_gymz_client.py\n\nThe client should now continously print commands, observations and rewards to the terminal. If it does not, please report the issue.\n\nCode status\n-----------\n\ngymz is in a fairly early development stage and should be used with care. Please report any unexpected behaviour you encounter and do not hesitate to create PRs.\n\n.. |Python2.7| image:: https://img.shields.io/badge/python-2.7-blue.svg\n :target: https://www.python.org/\n.. |License| image:: http://img.shields.io/:license-MIT-green.svg\n :target: https://opensource.org/licenses/MIT\n" }, { "alpha_fraction": 0.6244673132896423, "alphanum_fraction": 0.6286036372184753, "avg_line_length": 44.32954406738281, "blob_id": "7cfff7ace452483d4d0ae7e1fd2ee10e8feb095a", "content_id": "d3ef7aea96ec7002269ce43a39f6aec958bd6497", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7978, "license_type": "permissive", "max_line_length": 256, "num_lines": 176, "path": "/gymz/gym_wrapper.py", "repo_name": "Deeptradingfx/python-gymz", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport logging\nimport numpy as np\nimport os\nimport time\n\nimport gym # openai gym\nimport gym.spaces\nimport gym.wrappers\n\nfrom .wrapper_base import WrapperBase\nfrom . import messages\n\nlogger = logging.getLogger(__name__)\n\n\nclass GymWrapper(WrapperBase):\n \"\"\"Wrapper for the OpenAI Gym toolkit\"\"\"\n\n def __init__(self, config):\n WrapperBase.__init__(self)\n\n self._initial_reward = config['Env']['initial_reward']\n try:\n self._final_reward = config['Env']['final_reward']\n except KeyError:\n self._final_reward = None\n try:\n self._final_reward_null = config['Env']['final_reward_null']\n except KeyError:\n self._final_reward_null = None\n\n self._min_reward = config['Env']['min_reward']\n self._max_reward = config['Env']['max_reward']\n self._inter_trial_observation = config['Env']['inter_trial_observation']\n self._render = config['Env']['render']\n\n self._episode = 0\n self._episode_step = 0\n self._episode_reward = []\n self._episode_observation = []\n self._episode_time_start = time.time()\n\n self._monitor = config['Env']['monitor']\n if self._monitor:\n self._monitor_dir = os.path.join(config['All']['prefix'], config['Env']['monitor_dir'])\n\n def seed(self, seed):\n self._env.seed(seed)\n\n def load_env(self, env, *args, **kwargs):\n if 'monitor_args' in kwargs: # handle monitor args separately\n monitor_args = kwargs['monitor_args']\n del kwargs['monitor_args']\n\n if not self._monitor and len(monitor_args) > 0:\n logger.warn('monitoring not enabled but passing monitor arguments')\n\n self._env = gym.make(env, *args, **kwargs)\n self._check_parameters()\n\n if self._monitor:\n self._env = gym.wrappers.Monitor(self._env, self._monitor_dir, **monitor_args)\n\n def _check_parameters(self):\n if self._min_reward is not None and np.shape(self._min_reward) != ():\n raise ValueError('min_reward needs to be one dimensional. Please adjust your config.')\n if self._max_reward is not None and np.shape(self._max_reward) != ():\n raise ValueError('max_reward needs to be one dimensional. Please adjust your config.')\n if self._initial_reward is not None and np.shape(self._initial_reward) != ():\n raise ValueError('initial_reward needs to be one dimensional. 
Please adjust your config.')\n if self._final_reward is not None and np.shape(self._final_reward) != ():\n raise ValueError('final_reward needs to be one dimensional. Please adjust your config.')\n if self._final_reward_null is not None and np.shape(self._final_reward_null) != ():\n raise ValueError('final_reward_null needs to be one dimensional. Please adjust your config.')\n if np.shape(self._inter_trial_observation) != np.shape(self._env.observation_space.sample()):\n raise ValueError('inter_trial_observation has shape {} while the obervation space has shape {}. These need to be equal. Please adjust your config.'.format(np.shape(self._inter_trial_observation), np.shape(self._env.observation_space.sample())))\n\n def reset(self):\n self._output = self._env.reset() # reset returns initial state\n self._done_buffer[0] = False\n\n # initial reward is always assumed to be zero (by Gym), we\n # allow to overwrite it with a custom value to avoid potential\n # jumps in value from first to second state\n self._reward = self._initial_reward\n\n self._episode += 1\n self._episode_step = 0\n self._episode_reward = []\n self._episode_observation = []\n self._episode_time_start = time.time()\n\n def execute_action(self):\n # Gym expects actions in different format depending on type of\n # action space\n if isinstance(self._env.action_space, gym.spaces.Discrete):\n action = self._command_buffer[0][0]['value']\n else:\n action = [self._command_buffer[0][0]['value']]\n\n self._output, self._reward, self._done_buffer[0], _ = self._env.step(action)\n\n # record Gym output depending on type of observation space\n if isinstance(self._env.observation_space, gym.spaces.Discrete):\n self._episode_observation.append(self._output)\n elif isinstance(self._env.observation_space, gym.spaces.Box):\n self._episode_observation.append(list(self._output))\n else:\n raise NotImplementedError('Observation space {obs} not supported.'.format(obs=self._env.observation_space))\n\n self._episode_step += 1\n\n # in case user provides a reward value, overwrite the one retrieved from Gym\n if self._done_buffer[0] is True:\n if self._final_reward is not None:\n self._reward = self._final_reward\n elif abs(self._reward) < 1e-10:\n self._reward = self._final_reward_null\n\n self._episode_reward.append(self._reward)\n\n if self._render:\n self._env.render()\n\n def update_output_buffer(self):\n assert(self._output_buffer is not None)\n\n # handle Gym output depending on type of observation space\n if isinstance(self._env.observation_space, gym.spaces.Discrete):\n self._output_buffer[0] = messages.to_message(0, self._env.observation_space.n - 1, self._output)\n elif isinstance(self._env.observation_space, gym.spaces.Box):\n self._output_buffer[0] = messages.to_message(self._env.observation_space.low, self._env.observation_space.high, self._output)\n else:\n raise NotImplementedError('Observation space {obs} not supported.'.format(obs=self._env.observation_space))\n\n def get_command_buffer(self):\n if self._command_buffer is None:\n # set up buffer depending on type of action space\n if isinstance(self._env.action_space, gym.spaces.Discrete):\n self._command_buffer = [messages.to_message(0, self._env.action_space.n - 1, 0)]\n elif isinstance(self._env.action_space, gym.spaces.Box) and len(self._env.action_space.shape) == 1:\n self._command_buffer = [messages.to_message(self._env.action_space.low[0], self._env.action_space.high[0], 0.)]\n else:\n raise NotImplementedError('Action space {acts} not 
supported.'.format(acts=self._env.action_space))\n\n return self._command_buffer\n\n def get_output_buffer(self):\n if self._output_buffer is None:\n self._output_buffer = [[]]\n self._output = self._env.reset() # reset returns initial observation\n self.update_output_buffer()\n return self._output_buffer\n\n def clear_output_buffer(self):\n assert(self._output_buffer is not None)\n\n if isinstance(self._env.observation_space, gym.spaces.Discrete):\n self._output_buffer[0] = messages.to_message(0, self._env.observation_space.n - 1, self._inter_trial_observation)\n elif isinstance(self._env.observation_space, gym.spaces.Box) and len(self._env.observation_space.shape) == 1:\n if np.shape(self._env.observation_space.low) != np.shape(self._inter_trial_observation):\n raise ValueError('Dimensions of inter_trial_observation do not match environment.')\n self._output_buffer[0] = messages.to_message(self._env.observation_space.low, self._env.observation_space.high, self._inter_trial_observation)\n else:\n raise NotImplementedError('Observation space {obs} not supported.'.format(obs=self._env.observation_space))\n\n def report(self):\n return {\n self._episode: {\n 'reward': self._episode_reward,\n 'obervation': self._episode_observation,\n 'duration': time.time() - self._episode_time_start\n }\n }\n" }, { "alpha_fraction": 0.5613496899604797, "alphanum_fraction": 0.5644171833992004, "avg_line_length": 28.636363983154297, "blob_id": "a88283aea86449d870855a086858a451ab4bae5d", "content_id": "154e2832acfff2fbb861db0ee363fe45939e725c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 326, "license_type": "permissive", "max_line_length": 92, "num_lines": 11, "path": "/gymz/messages.py", "repo_name": "Deeptradingfx/python-gymz", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport numpy as np\n\n\ndef to_message(low, high, value):\n assert(np.shape(low) == np.shape(high) == np.shape(value))\n try:\n return [{'min': low[i], 'max': high[i], 'value': value[i]} for i in range(len(low))]\n except TypeError:\n return [{'min': low, 'max': high, 'value': value}]\n" }, { "alpha_fraction": 0.78899085521698, "alphanum_fraction": 0.8012232184410095, "avg_line_length": 35.33333206176758, "blob_id": "e8568564d31dbd14bd0baea0d12cab902bf02fa7", "content_id": "40be0f0242a0adfd37f1a719321cf99ca2b1bc72", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 327, "license_type": "permissive", "max_line_length": 69, "num_lines": 9, "path": "/gymz/__init__.py", "repo_name": "Deeptradingfx/python-gymz", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom .zmq_reward_sender_thread import ZMQRewardSenderThread\nfrom .zmq_observation_sender_thread import ZMQObservationSenderThread\nfrom .zmq_command_receiver_thread import ZMQCommandReceiverThread\nfrom .env_runner_thread import EnvRunnerThread\nfrom .gym_wrapper import GymWrapper\n\n__version__ = '0.0.2'\n" } ]
6
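The README and `gymz-controller` script in the record above describe a wrapper that publishes observations and rewards on ZeroMQ PUB sockets and receives actions on a SUB socket. The sketch below shows a client along those lines using pyzmq; the endpoint addresses, the JSON framing, and the command payload shape are assumptions for illustration (the real values live in `DefaultConfig.json`, which is not included in this record), so treat it as a pattern sketch rather than the packaged `examples/random_gymz_client.py`.

```python
import time
import zmq

# Assumed endpoints; the wrapper's actual addresses come from its config file.
OBS_ADDR = "tcp://localhost:5556"
CMD_ADDR = "tcp://localhost:5555"

ctx = zmq.Context()

obs_socket = ctx.socket(zmq.SUB)          # observations are published by the wrapper
obs_socket.connect(OBS_ADDR)
obs_socket.setsockopt_string(zmq.SUBSCRIBE, "")

cmd_socket = ctx.socket(zmq.PUB)          # actions go to the wrapper's command SUB socket
cmd_socket.connect(CMD_ADDR)
time.sleep(0.5)                           # give the slow-joining PUB/SUB pair time to connect

for _ in range(10):
    observation = obs_socket.recv_json()  # e.g. [{'min': ..., 'max': ..., 'value': ...}]
    print("observation:", observation)
    # send a command back in the same min/max/value shape used by gymz.messages
    cmd_socket.send_json([{"min": 0, "max": 2, "value": 1}])
```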
patirasam/Face-Morphing-DelaunayTriangulation-
https://github.com/patirasam/Face-Morphing-DelaunayTriangulation-
5e3183ddc2aa554975b1467c47666ffd2b83af86
c0cb4674cd2ed9e6c5e6da89d057093a214031df
ab6cb16a07d092132f16a8124c18d422b12a6959
refs/heads/master
2021-09-11T19:56:29.337491
2018-04-11T20:05:12
2018-04-11T20:05:12
122,506,348
8
0
null
null
null
null
null
[ { "alpha_fraction": 0.5008541941642761, "alphanum_fraction": 0.5125284790992737, "avg_line_length": 29.275861740112305, "blob_id": "232159c2fff165165a08ff8a20cd30164f241f9e", "content_id": "48085ca85cf7907f7ff269afd696090882561e38", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10536, "license_type": "no_license", "max_line_length": 91, "num_lines": 348, "path": "/delaunay_div_conq.py", "repo_name": "patirasam/Face-Morphing-DelaunayTriangulation-", "src_encoding": "UTF-8", "text": "import cv2\n\n\nclass Point(object):\n def __init__(self, x, y):\n self.x = x+0.0\n self.y = y+0.0\n self.incoming_edge = None\n\n def __str__(self):\n return \"(\" + str(self.x) + \",\" + str(self.y) + \")\"\n\n def get_tuple(self):\n return int(self.x), int(self.y)\n\n def __lt__(self, other):\n return self.y < other.y if self.x == other.x else self.x < other.x\n\n\nleft = 1\nright = 0\n\n\nclass Edge(object):\n def __init__(self, org, dest):\n self.origin = org\n self.destination = dest\n if not self.origin.incoming_edge:\n self.origin.incoming_edge = self\n if not self.destination.incoming_edge:\n self.destination.incoming_edge = self\n self.origin_next = self.origin_prev = self.dest_next = self.dest_prev = self\n\n def __str__(self):\n return str(self.origin) + \"--->\" + str(self.destination)\n\n\ndef splice(a, b, v):\n if a.origin == v:\n next = a.origin_next\n a.origin_next = b\n else:\n next = a.dest_next\n a.dest_next = b\n\n if next.origin == v:\n next.origin_prev = b\n else:\n next.dest_prev = b\n\n if b.origin == v:\n b.origin_next = next\n b.origin_prev = a\n else:\n b.dest_next = next\n b.dest_prev = a\n\n\ndef join(edge_a, pt1, edge_b, pt2, direction):\n e = Edge(pt1, pt2)\n if direction == left:\n if edge_a.origin == pt1:\n splice(edge_a.origin_prev, e, pt1)\n else:\n splice(edge_a.dest_prev, e, pt1)\n splice(edge_b, e, pt2)\n else:\n splice(edge_a, e, pt1)\n if edge_b.origin == pt2:\n splice(edge_b.origin_prev, e, pt2)\n else:\n splice(edge_b.dest_prev, e, pt2)\n return e\n\n\ndef delete_edge(edge):\n u = edge.origin\n v = edge.destination\n\n if u.incoming_edge == edge:\n u.incoming_edge = edge.origin_next\n if v.incoming_edge == edge:\n v.incoming_edge = edge.dest_next\n\n if edge.origin_next.origin == u:\n edge.origin_next.origin_prev = edge.origin_prev\n else:\n edge.origin_next.dest_prev = edge.origin_prev\n\n if edge.origin_prev.origin == u:\n edge.origin_prev.origin_next = edge.origin_next\n else:\n edge.origin_prev.dest_next = edge.origin_next\n\n if edge.dest_next.origin == v:\n edge.dest_next.origin_prev = edge.dest_prev\n else:\n edge.dest_next.dest_prev = edge.dest_prev\n\n if edge.dest_prev.origin == v:\n edge.dest_prev.origin_next = edge.dest_next\n else:\n edge.dest_prev.dest_next = edge.dest_next\n\n\ndef Other_point(edge, pt):\n return edge.destination if edge.origin == pt else edge.origin\n\n\ndef Next(edge, pt):\n return edge.origin_next if edge.origin == pt else edge.dest_next\n\n\ndef Prev(edge, pt):\n return edge.origin_prev if edge.origin == pt else edge.dest_prev\n\n\ndef get_vector(pt1, pt2):\n return pt2.x - pt1.x, pt2.y - pt1.y\n\n\ndef Cross_product_3p(pt1, pt2, pt3):\n return (pt2.x - pt1.x) * (pt3.y - pt1.y) - (pt2.y - pt1.y) * (pt3.x - pt1.x)\n\n\ndef Cross_product_2v(u1, v1, u2, v2):\n return u1 * v2 - v1 * u2\n\n\ndef Dot_product_2v(u1, v1, u2, v2):\n return u1 * u2 + v1 * v2\n\n\ndef lower_tangent(r_cw_l, s, l_ccw_r, u):\n l = r_cw_l\n r = l_ccw_r\n o_l = s\n d_l = Other_point(l, s)\n o_r = u\n d_r = 
Other_point(r, u)\n finished = False\n while not finished:\n if Cross_product_3p(o_l, d_l, o_r) > 0:\n l = Prev(l, d_l)\n o_l = d_l\n d_l = Other_point(l, o_l)\n elif Cross_product_3p(o_r, d_r, o_l) < 0:\n r = Next(r, d_r)\n o_r = d_r\n d_r = Other_point(r, o_r)\n else:\n finished = True\n l_lower = l\n r_lower = r\n org_l_lower = o_l\n org_r_lower = o_r\n return l_lower, org_l_lower, r_lower, org_r_lower\n\n\ndef merge(r_cw_l, s, l_ccw_r, u):\n l_lower, org_l_lower, r_lower, org_r_lower = lower_tangent(r_cw_l, s, l_ccw_r, u)\n base = join(l_lower, org_l_lower, r_lower, org_r_lower, right)\n org_base = org_l_lower\n dest_base = org_r_lower\n l_tangent = base\n while True:\n # Initialise l_cand and r_cand\n l_cand = Next(base, org_base)\n r_cand = Prev(base, dest_base)\n dest_l_cand = Other_point(l_cand, org_base)\n dest_r_cand = Other_point(r_cand, dest_base)\n\n # Vectors for above and \"in_circle\" tests.\n u_l_c_o_b, v_l_c_o_b = get_vector(dest_l_cand, org_base)\n u_l_c_d_b, v_l_c_d_b = get_vector(dest_l_cand, dest_base)\n u_r_c_o_b, v_r_c_o_b = get_vector(dest_r_cand, org_base)\n u_r_c_d_b, v_r_c_d_b = get_vector(dest_r_cand, dest_base)\n\n # Above tests.\n c_p_l_cand = Cross_product_2v(u_l_c_o_b, v_l_c_o_b, u_l_c_d_b, v_l_c_d_b)\n c_p_r_cand = Cross_product_2v(u_r_c_o_b, v_r_c_o_b, u_r_c_d_b, v_r_c_d_b)\n above_l_cand = c_p_l_cand > 0.0\n above_r_cand = c_p_r_cand > 0.0\n if not above_l_cand and not above_r_cand:\n break # Finished.\n\n if above_l_cand:\n\n d_p_l_cand = Dot_product_2v(u_l_c_o_b, v_l_c_o_b, u_l_c_d_b, v_l_c_d_b)\n cot_l_cand = d_p_l_cand / c_p_l_cand\n\n while True:\n next = Next(l_cand, org_base)\n dest_next = Other_point(next, org_base)\n u_n_o_b, v_n_o_b = get_vector(dest_next, org_base)\n u_n_d_b, v_n_d_b = get_vector(dest_next, dest_base)\n c_p_next = Cross_product_2v(u_n_o_b, v_n_o_b, u_n_d_b, v_n_d_b)\n above_next = c_p_next > 0.0\n\n if not above_next:\n break # Finished.\n\n d_p_next = Dot_product_2v(u_n_o_b, v_n_o_b, u_n_d_b, v_n_d_b)\n cot_next = d_p_next / c_p_next\n\n if cot_next > cot_l_cand:\n break # Finished.\n\n delete_edge(l_cand)\n l_cand = next\n cot_l_cand = cot_next\n\n # Now do the symmetrical for r_cand\n if above_r_cand:\n\n d_p_r_cand = Dot_product_2v(u_r_c_o_b, v_r_c_o_b, u_r_c_d_b, v_r_c_d_b)\n cot_r_cand = d_p_r_cand / c_p_r_cand\n\n while True:\n prev = Prev(r_cand, dest_base)\n dest_prev = Other_point(prev, dest_base)\n u_p_o_b, v_p_o_b = get_vector(dest_prev, org_base)\n u_p_d_b, v_p_d_b = get_vector(dest_prev, dest_base)\n c_p_prev = Cross_product_2v(u_p_o_b, v_p_o_b, u_p_d_b, v_p_d_b)\n above_prev = c_p_prev > 0.0\n\n if not above_prev:\n break # Finished.\n\n d_p_prev = Dot_product_2v(u_p_o_b, v_p_o_b, u_p_d_b, v_p_d_b)\n cot_prev = d_p_prev / c_p_prev\n\n if cot_prev > cot_r_cand:\n break # Finished.\n\n delete_edge(r_cand)\n r_cand = prev\n cot_r_cand = cot_prev\n\n dest_l_cand = Other_point(l_cand, org_base)\n dest_r_cand = Other_point(r_cand, dest_base)\n if not above_l_cand or (above_l_cand and above_r_cand and cot_r_cand < cot_l_cand):\n base = join(base, org_base, r_cand, dest_r_cand, right)\n dest_base = dest_r_cand\n else:\n base = join(l_cand, dest_l_cand, base, dest_base, right)\n org_base = dest_l_cand\n return l_tangent\n\n\ndef divide(pts, l, r):\n n = r - l + 1\n # print \"n =\", n\n if n == 2:\n # Bottom of the recursion. 
Make an edge\n l_ccw = r_cw = Edge(pts[l], pts[r])\n elif n == 3:\n a = Edge(pts[l], pts[l + 1])\n b = Edge(pts[l + 1], pts[r])\n splice(a, b, pts[l + 1])\n c_p = Cross_product_3p(pts[l], pts[l + 1], pts[r])\n if c_p > 0.0:\n c = join(a, pts[l], b, pts[r], right)\n l_ccw = a\n r_cw = b\n elif c_p < 0.0:\n c = join(a, pts[l], b, pts[r], left)\n l_ccw = c\n r_cw = c\n else:\n l_ccw = a\n r_cw = b\n else:\n split = (l + r) / 2\n # print \"Divide at\", 0, split\n (l_ccw_l, r_cw_l) = divide(pts, l, split)\n # print_incoming_edges(pts)\n # print \"Divide at\", split+1, r\n (l_ccw_r, r_cw_r) = divide(pts, split + 1, r)\n # print_incoming_edges(pts)\n # print \"Merging at\", split\n l_tangent = merge(r_cw_l, pts[split], l_ccw_r, pts[split + 1])\n # print_incoming_edges(pts)\n # print \"l_tangent\", l_tangent\n if l_tangent.origin == pts[l]:\n l_ccw_l = l_tangent\n if l_tangent.destination == pts[r]:\n r_cw_r = l_tangent\n l_ccw = l_ccw_l\n r_cw = r_cw_r\n # print l_ccw, r_cw\n return l_ccw, r_cw\n\n\ndef get_edges(pts):\n edge_set = set()\n for point in pts:\n start_e = point.incoming_edge\n if start_e:\n if point == start_e.origin:\n edge_set.add(start_e)\n e = Next(start_e, start_e.origin)\n print \"here\"\n while not start_e == e:\n edge_set.add(e)\n e = Next(e, start_e.origin)\n else:\n edge_set.add(start_e)\n e = Next(start_e, start_e.destination)\n print \"here\"\n while not start_e == e:\n edge_set.add(e)\n e = Next(e, start_e.destination)\n return edge_set\n\n\ndef delaunay(pts_file, src_img):\n img = src_img # .copy()\n # Create an array of points.\n points = []\n\n # Read in the points from a text file\n with open(pts_file) as file:\n for line in file:\n x, y = line.split()\n points.append(Point(int(x), int(y)))\n\n points.sort()\n divide(points, 0, len(points) - 1)\n\n for point in points:\n cv2.circle(img, point.get_tuple(), 2, (0, 0, 255), cv2.FILLED, cv2.LINE_AA, 0)\n\n edge_set = get_edges(points)\n for edge in edge_set:\n cv2.line(img, edge.origin.get_tuple(),\n edge.destination.get_tuple(), (255, 255, 255), 1, cv2.LINE_AA, 0)\n namedWindow( \"Display window\", WINDOW_AUTOSIZE );// Create a window for display.\n imshow( \"Orignal Photo\", src_img ); \n namedWindow( \"Display window\", WINDOW_AUTOSIZE );// Create a window for display.\n imshow( \"After Delaunay triangulation\", img ); \n return img\n\n\nif __name__ == '__main__':\n triangulated_img = delaunay(\"samkit_2/obama.txt\", cv2.imread(\"samkit_2/obama.jpg\"))\n cv2.imshow(\"Div_Conq Delaunay Triangulation\", triangulated_img)\n cv2.waitKey(0)\n" }, { "alpha_fraction": 0.5396618843078613, "alphanum_fraction": 0.600130021572113, "avg_line_length": 32.434783935546875, "blob_id": "b8f3eb6964ff14ef1ca7d768106a7f067e99e6c1", "content_id": "e599aca749d7fad0128c7c281b452e91c256b5a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3076, "license_type": "no_license", "max_line_length": 108, "num_lines": 92, "path": "/face_morph.py", "repo_name": "patirasam/Face-Morphing-DelaunayTriangulation-", "src_encoding": "UTF-8", "text": "import cv2\nimport sys\nimport numpy as np\nfrom delaunay_div_conq import delaunay\nfrom feature_detector import extract_features\nfrom vid_lib import Video\n\nSRC_IMG = sys.argv[1] # \"donald_trump.jpg\"\nTARGET_IMG = sys.argv[2] # \"hillary_clinton.jpg\"\nVID_FILE = sys.argv[3]\n# SRC_IMG = \"../img_80/donald_trump.jpg\"\n# TARGET_IMG = \"../img_80/hillary_clinton.jpg\"\n# VID_FILE = 'test.avi'\n\n\ndef apply_affine_transform(src, src_tri, 
target_tri, size):\n warp_mat = cv2.getAffineTransform(np.float32(src_tri), np.float32(target_tri))\n dst = cv2.warpAffine(src, warp_mat, (size[0], size[1]), None, flags=cv2.INTER_LINEAR,\n borderMode=cv2.BORDER_REFLECT_101)\n return dst\n\n\ndef morph_triangle(img1, img2, img, t1, t2, t, alpha):\n r1 = cv2.boundingRect(np.float32([t1]))\n r2 = cv2.boundingRect(np.float32([t2]))\n r = cv2.boundingRect(np.float32([t]))\n\n t1_rect = []\n t2_rect = []\n t_rect = []\n\n for i in xrange(0, 3):\n t_rect.append(((t[i][0] - r[0]), (t[i][1] - r[1])))\n t1_rect.append(((t1[i][0] - r1[0]), (t1[i][1] - r1[1])))\n t2_rect.append(((t2[i][0] - r2[0]), (t2[i][1] - r2[1])))\n\n mask = np.zeros((r[3], r[2], 3), dtype=np.float32)\n cv2.fillConvexPoly(mask, np.int32(t_rect), (1.0, 1.0, 1.0), 16, 0)\n\n img1_rect = img1[r1[1]:r1[1] + r1[3], r1[0]:r1[0] + r1[2]]\n img2_rect = img2[r2[1]:r2[1] + r2[3], r2[0]:r2[0] + r2[2]]\n\n size = (r[2], r[3])\n warp_image1 = apply_affine_transform(img1_rect, t1_rect, t_rect, size)\n warp_image2 = apply_affine_transform(img2_rect, t2_rect, t_rect, size)\n\n img_rect = (1.0 - alpha) * warp_image1 + alpha * warp_image2\n\n\n img[r[1]:r[1]+r[3], r[0]:r[0]+r[2]] = img[r[1]:r[1]+r[3], r[0]:r[0]+r[2]] * (1 - mask) + img_rect * mask\n\n\n\ndef get_morph(alpha=0.5):\n\n weighted_pts = []\n for i in xrange(0, len(src_points)):\n x = (1 - alpha) * src_points[i][0] + alpha * target_points[i][0]\n y = (1 - alpha) * src_points[i][1] + alpha * target_points[i][1]\n weighted_pts.append((x, y))\n\n img_morph = np.zeros(src_img.shape, dtype=src_img.dtype)\n\n for triangle in del_triangles:\n x, y, z = triangle\n t1 = [src_points[x], src_points[y], src_points[z]]\n t2 = [target_points[x], target_points[y], target_points[z]]\n t = [weighted_pts[x], weighted_pts[y], weighted_pts[z]]\n morph_triangle(src_img, target_img, img_morph, t1, t2, t, alpha)\n\n return cv2.cvtColor(np.uint8(img_morph), cv2.COLOR_RGB2BGR)\n\n\nsrc_img = cv2.imread(SRC_IMG)\ntarget_img = cv2.imread(TARGET_IMG)\nsrc_points = extract_features(SRC_IMG)\ntarget_points = extract_features(TARGET_IMG)\n\navg_points = []\nfor i in xrange(0, len(src_points)):\n x = 0.5 * src_points[i][0] + 0.5 * target_points[i][0]\n y = 0.5 * src_points[i][1] + 0.5 * target_points[i][1]\n avg_points.append((int(x), int(y)))\n\ndel_triangles = delaunay(avg_points)\n\nvideo = Video(VID_FILE, 20, 600, 800)\nfor percent in np.linspace(1, 0, num=200):\n print 'Writing Frame', 200 - int(percent*200) + 1\n video.write(get_morph(alpha=percent))\n\nvideo.end()\n" }, { "alpha_fraction": 0.8179012537002563, "alphanum_fraction": 0.8179012537002563, "avg_line_length": 79.5, "blob_id": "d10191b81eab1e188b95e8568860e2925f89b38a", "content_id": "261abb7fcb553e069ff5a2f8b07c33433e3ef422", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 324, "license_type": "no_license", "max_line_length": 209, "num_lines": 4, "path": "/README.md", "repo_name": "patirasam/Face-Morphing-DelaunayTriangulation-", "src_encoding": "UTF-8", "text": "# DelaunayTriangulation\n- Image morphing for given source and destination images. \n- Morphing uses triangulation algorithm called Delaunay triangulation. Used divide and conquer approach for triangulation algorithm instead of iterative approach. This drastically reduces time-complexity. \n- Used OpenCV for morphing. 
\n" }, { "alpha_fraction": 0.6339066624641418, "alphanum_fraction": 0.6609336733818054, "avg_line_length": 29.148147583007812, "blob_id": "444b86d12b6a178f484c1c68976841c6a302bea4", "content_id": "14718e78b96965929566d95a8bd95188b8a6a798", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 814, "license_type": "no_license", "max_line_length": 78, "num_lines": 27, "path": "/feature_detector.py", "repo_name": "patirasam/Face-Morphing-DelaunayTriangulation-", "src_encoding": "UTF-8", "text": "import dlib\nfrom skimage import io\n\ndetector = dlib.get_frontal_face_detector()\npredictor = dlib.shape_predictor('shape_predictor_68_face_landmarks.dat')\n\n\ndef extract_features(img_path):\n img = io.imread(img_path)\n\n # Ask the detector to find the bounding boxes of each face. The 1 in the\n # second argument indicates that we should upsample the image 1 time. This\n # will make everything bigger and allow us to detect more faces.\n dets = detector(img, 1)\n for k, d in enumerate(dets):\n shape = predictor(img, d)\n\n vec = [(0, 0), (0, img.shape[0]-1)]\n\n for j in range(0, 68):\n vec.append((shape.part(j).x, shape.part(j).y))\n vec.append((img.shape[1]-1, 0))\n vec.append((img.shape[1]-1, img.shape[0]-1))\n\n return vec\n\n# extract_features('../img_80/donald_trump.jpg')\n" } ]
4
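The `face_morph.py` file in the record above builds each output frame by linearly interpolating the matched landmarks with weight alpha and then cross-dissolving the affine-warped triangle patches with the same weight. A small numeric sketch of that blend, with made-up landmark coordinates and patch values standing in for the warped image regions:

```python
import numpy as np

# Hypothetical corresponding landmark from each face (x, y); alpha sweeps from 1 to 0 across frames.
src_pt, target_pt, alpha = np.array([120.0, 85.0]), np.array([140.0, 90.0]), 0.25

# Intermediate landmark used to build the frame's triangles (same formula as get_morph).
weighted_pt = (1 - alpha) * src_pt + alpha * target_pt   # -> [125.  86.25]

# After both source triangles are affine-warped onto the intermediate shape,
# the pixels are blended with the same weights (as in morph_triangle).
warp1 = np.full((2, 2, 3), 200.0)   # stand-in for the warped source patch
warp2 = np.full((2, 2, 3), 100.0)   # stand-in for the warped target patch
blended = (1 - alpha) * warp1 + alpha * warp2            # -> every pixel 175.0

print(weighted_pt, blended[0, 0])
```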
trishorts/Spritz
https://github.com/trishorts/Spritz
e1741be24f7701d379af10eb6b25aebfb3bbd191
cd1721b4bd31f60bd5d7e654cbbc78a9e677e682
4a63e45ee636fabc8f55779fd2c906c55ef72bf0
refs/heads/master
2022-12-04T23:38:34.184951
2020-08-25T17:16:38
2020-08-25T17:16:38
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6703296899795532, "alphanum_fraction": 0.6758241653442383, "avg_line_length": 38.8125, "blob_id": "a25ac7c0c089e1cdbc9a9adbb070e2874936acc6", "content_id": "e3993f357f533b6b68dec5d3df34cff7e2312279", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 1274, "license_type": "permissive", "max_line_length": 108, "num_lines": 32, "path": "/Spritz/Dockerfile", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "FROM conda/miniconda3\nLABEL maintainer=\"Anthony Cesnik <[email protected]>\"\n\n# install dotnet\nWORKDIR /usr/bin/local\nRUN apt-get update -y && \\\n apt-get install -y wget curl gpg \\\n && wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > microsoft.asc.gpg \\\n && mv microsoft.asc.gpg /etc/apt/trusted.gpg.d/ \\\n && wget -q https://packages.microsoft.com/config/debian/9/prod.list \\\n && mv prod.list /etc/apt/sources.list.d/microsoft-prod.list \\\n && chown root:root /etc/apt/trusted.gpg.d/microsoft.asc.gpg \\\n && chown root:root /etc/apt/sources.list.d/microsoft-prod.list \\\n && apt-get install -y apt-transport-https \\\n && apt-get update -y \\\n && apt-get install -y dotnet-sdk-2.2\n\n# install Spritz\nWORKDIR /app\nCOPY . ./\nRUN conda init \\\n && conda update conda \\\n && conda env create --name spritz --file environment.yaml\n\n# Add script to execute snakemake upon activate\n# Note that running this script outside of activation has not worked\nADD start.sh /usr/local/envs/spritz/etc/conda/activate.d/start.sh\nRUN chmod 777 /usr/local/envs/spritz/etc/conda/activate.d/start.sh\n\n# activate environment\nRUN echo \"source activate spritz\" > ~/.bashrc\nENV PATH /opt/conda/envs/spritz/bin:$PATH\n" }, { "alpha_fraction": 0.616314172744751, "alphanum_fraction": 0.6459214687347412, "avg_line_length": 47.67647171020508, "blob_id": "453a968244e0eb12f09dbe29dc61e2648e23967c", "content_id": "5bb744eff3700905b00d8bea4a06bb5848d84a8b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1655, "license_type": "permissive", "max_line_length": 129, "num_lines": 34, "path": "/Spritz/rules/testing.smk", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "## For generating smaller human genomes\nrule filter_gff3:\n input: \"data/ensembl/\" + REF + \".\" + config[\"release\"] + \".gff3\"\n output: \"data/ensembl/202122.gff3\"\n shell: \"grep \\\"^#\\|20\\|^21\\|^22\\\" \\\"data/ensembl/\" + REF + \".\" + config[\"release\"] + \".gff3\\\" > \\\"data/ensembl/202122.gff3\\\"\"\n\nrule fix_gff3_for_rsem:\n '''This script changes descriptive notes in column 4 to \"gene\" if a gene row, and it also adds ERCCs to the gene model'''\n input: \"data/ensembl/\" + REF + \".\" + config[\"release\"] + \".gff3\"\n output: \"data/ensembl/\" + REF + \".\" + config[\"release\"] + \".gff3\" + \".fix.gff3\"\n shell: \"python scripts/fix_gff3_for_rsem.py {input} {output}\"\n\nrule filter_fa:\n input: \"data/ensembl/\" + REF + \".dna.primary_assembly.fa\"\n output: \"data/ensembl/202122.fa\"\n script: \"../scripts/filter_fasta.py\"\n\nrule simulate_variants:\n input: FA\n output: \"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}.test.vcf\"\n benchmark: \"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}.test.vcf.benchmark\"\n log: \"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}.test.vcf.log\"\n shell: \"mason_variator -ir {input} -ov {output} 2> {log}\"\n\nrule generate_fastqs:\n input:\n fa=FA,\n 
vcf=\"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}.test.vcf\"\n output:\n fq1=\"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}.test_1.fastq\",\n fq2=\"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}.test_2.fastq\",\n benchmark: \"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}.test.benchmark\"\n log: \"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}.test.log\"\n shell: \"mason_simulator -ir {input.fa} -n 100000 -iv {input.vcf} -o {output.fq1} -or {output.fq2} 2> {log}\"\n" }, { "alpha_fraction": 0.6436063647270203, "alphanum_fraction": 0.6530969142913818, "avg_line_length": 53.10810852050781, "blob_id": "83f764a4aa7b2d12ed4661432ca65d17f8a03d24", "content_id": "1e38de168beb32064e67eb6addb3dc7234dec762", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4004, "license_type": "permissive", "max_line_length": 158, "num_lines": 74, "path": "/Spritz/rules/downloads.smk", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "REF=config[\"species\"] + \".\" + config[\"genome\"]\nSPECIES_LOWER = config[\"species\"].lower()\n\nprotocol = \"http\"\nprimary = f\"{protocol}://ftp.ensembl.org/pub/release-{ENSEMBL_VERSION}//fasta/{SPECIES_LOWER}/dna/{REF}.dna.primary_assembly.fa.gz\"\ntoplevel = f\"{protocol}://ftp.ensembl.org/pub/release-{ENSEMBL_VERSION}//fasta/{SPECIES_LOWER}/dna/{REF}.dna.toplevel.fa.gz\"\ngff = f\"{protocol}://ftp.ensembl.org/pub/release-{ENSEMBL_VERSION}/gff3/{SPECIES_LOWER}/{REF}.{ENSEMBL_VERSION}.gff3.gz\"\npep = f\"{protocol}://ftp.ensembl.org/pub/release-{ENSEMBL_VERSION}//fasta/{SPECIES_LOWER}/pep/{REF}.pep.all.fa.gz\"\n\nrule download_ensembl_references:\n output:\n gfa=\"data/ensembl/\" + REF + \".dna.primary_assembly.fa\",\n gff3=\"data/ensembl/\" + REF + \".\" + config[\"release\"] + \".gff3\",\n pfa=\"data/ensembl/\" + REF + \".pep.all.fa\",\n benchmark: \"data/ensembl/downloads.benchmark\"\n log: \"data/ensembl/downloads.log\"\n shell:\n \"((wget -O - {primary} || wget -O - {toplevel}) | gunzip -c - > {output.gfa} && \"\n \"wget -O - {gff} | gunzip -c - > {output.gff3} && \"\n \"wget -O - {pep} | gunzip -c - > {output.pfa}) 2> {log}\"\n\nif SPECIES_LOWER == \"homo_sapiens\":\n rule download_dbsnp_vcf:\n '''Download dbsnp known variant sites if we are analyzing human data'''\n input: \"ChromosomeMappings/\" + config[\"genome\"] + \"_UCSC2ensembl.txt\"\n output: \"data/ensembl/\" + config[\"species\"] + \".ensembl.vcf\",\n benchmark: \"data/ensembl/downloads_dbsnp_vcf.benchmark\"\n log: \"data/ensembl/downloads_dbsnp_vcf.log\"\n shell:\n \"(wget -O - https://ftp.ncbi.nih.gov/snp/organisms/human_9606_b151_GRCh38p7/VCF/common_all_20180418.vcf.gz | \"\n \"zcat - | python scripts/convert_ucsc2ensembl.py > {output}) 2> {log}\"\nelse:\n # first get the possible VCF urls; note that Ensembl has started listing variants for each chromosome for human but not other species, but that may change\n vcf1 = f\"http://ftp.ensembl.org/pub/release-{ENSEMBL_VERSION}/variation/vcf/{SPECIES_LOWER}/{SPECIES}.vcf.gz\"\n vcf2 = f\"http://ftp.ensembl.org/pub/release-{ENSEMBL_VERSION}/variation/vcf/{SPECIES_LOWER}/{SPECIES_LOWER}.vcf.gz\"\n\n rule download_ensembl_vcf:\n '''Use Ensembl known variant sites if we are analyzing nonhuman data'''\n output: \"data/ensembl/\" + config[\"species\"] + \".ensembl.vcf\",\n benchmark: \"data/ensembl/downloads_ensembl_vcf.benchmark\"\n log: \"data/ensembl/downloads_ensembl_vcf.log\"\n shell: \"((wget -O - {vcf1} || wget -O - {vcf2}) | zcat - | python scripts/clean_vcf.py > {output}) 2> {log}\"\n\nrule 
index_ensembl_vcf:\n input: \"data/ensembl/\" + config[\"species\"] + \".ensembl.vcf\"\n output: \"data/ensembl/\" + config[\"species\"] + \".ensembl.vcf.idx\"\n log: \"data/ensembl/\" + config[\"species\"] + \".ensembl.vcf.idx.log\"\n shell: \"gatk IndexFeatureFile -F {input} 2> {log}\"\n\nrule download_chromosome_mappings:\n output: \"ChromosomeMappings/\" + config[\"genome\"] + \"_UCSC2ensembl.txt\"\n log: \"ChromosomeMappings/download_chromosome_mappings.log\"\n shell:\n \"(if [ -d ChromosomeMappings ]; then rm -rf ChromosomeMappings; fi && \"\n \"git clone https://github.com/dpryan79/ChromosomeMappings.git) 2> {log}\"\n\nrule reorder_genome_fasta:\n input: \"data/ensembl/\" + REF + \".dna.primary_assembly.fa\"\n output: \"data/ensembl/\" + REF + \".dna.primary_assembly.karyotypic.fa\"\n benchmark: \"data/ensembl/karyotypic_order.benchmark\"\n log: \"data/ensembl/karyotypic_order.log\"\n shell: \"python scripts/karyotypic_order.py 2> {log}\"\n\nrule dict_fa:\n input: \"data/ensembl/\" + config[\"species\"] + \".\" + config[\"genome\"] + \".dna.primary_assembly.karyotypic.fa\"\n output: \"data/ensembl/\" + config[\"species\"] + \".\" + config[\"genome\"] + \".dna.primary_assembly.karyotypic.dict\"\n shell: \"gatk CreateSequenceDictionary -R {input} -O {output}\"\n\nrule tmpdir:\n output:\n temp(directory(\"tmp\")),\n temp(directory(\"temporary\")),\n shell:\n \"mkdir tmp && mkdir temporary\"\n" }, { "alpha_fraction": 0.5997033715248108, "alphanum_fraction": 0.6038829684257507, "avg_line_length": 41.388572692871094, "blob_id": "3c2b434499386deb6af38c69b414e3eb806d0a4a", "content_id": "9590f872492070afc4018399ce8737d1c8aee40e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C#", "length_bytes": 7419, "license_type": "permissive", "max_line_length": 216, "num_lines": 175, "path": "/Spritz/GUI/WorkFlow.xaml.cs", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "using System;\nusing System.Collections.ObjectModel;\nusing System.IO;\nusing System.Linq;\nusing System.Security;\nusing System.Security.Permissions;\nusing System.Windows;\n\nnamespace Spritz\n{\n /// <summary>\n /// Interaction logic for workflows\n /// </summary>\n public partial class WorkFlowWindow : Window\n {\n private string AnalysisDirectory { get; set; }\n public string Reference { get; set; } // define notify property changed\n public ObservableCollection<EnsemblRelease> EnsemblReleases { get; set; }\n public Options Options { get; set; } = new Options(Environment.ProcessorCount);\n private MainWindow MainWindow { get; set; }\n private int Threads { get; set; }\n\n public WorkFlowWindow(string analysisDirectory)\n {\n AnalysisDirectory = analysisDirectory;\n InitializeComponent();\n PopulateChoices();\n MainWindow = (MainWindow)Application.Current.MainWindow;\n UpdateFieldsFromTask(Options);\n DataContext = this;\n }\n\n public WorkFlowWindow(Options options)\n {\n InitializeComponent();\n PopulateChoices();\n UpdateFieldsFromTask(options);\n MainWindow = (MainWindow)Application.Current.MainWindow;\n Options = options;\n DataContext = this;\n }\n\n protected void CancelButton_Click(object sender, RoutedEventArgs e)\n {\n DialogResult = false;\n }\n\n protected void SaveButton_Click(object sender, RoutedEventArgs e)\n {\n //// Experiment type selection\n //int iii = CmbxExperimentType.SelectedIndex;\n //if (iii == 0)\n //{\n // Options.ExperimentType = ExperimentType.RNASequencing.ToString();\n //}\n //else if (iii == 1)\n //{\n // Options.ExperimentType = 
ExperimentType.WholeGenomeSequencing.ToString();\n //}\n //else if (iii == 2)\n //{\n // Options.ExperimentType = ExperimentType.ExomeSequencing.ToString();\n //}\n //else\n //{\n // MessageBox.Show(\"Please choose an experiment type selection.\");\n // return;\n //}\n\n Options.AnalysisDirectory = TrimQuotesOrNull(txtAnalysisDirectory.Text);\n try\n {\n string testDirectory = Path.Combine(Options.AnalysisDirectory, $\"TestSpritzPermissions{Options.AnalysisDirectory.GetHashCode()}\");\n Directory.CreateDirectory(testDirectory);\n Directory.Delete(testDirectory);\n }\n catch (Exception)\n {\n MessageBox.Show($\"Error: Cannot write to specified analysis directory: {Options.AnalysisDirectory}. Please choose another directory.\", \"Write Permissions\", MessageBoxButton.OK, MessageBoxImage.Error);\n return;\n }\n\n if (!Directory.Exists(Options.AnalysisDirectory))\n {\n Directory.CreateDirectory(Options.AnalysisDirectory);\n }\n\n Options.Threads = Threads;\n EnsemblRelease ensembl = (EnsemblRelease)EnsemblReleaseVersions.SelectedItem;\n Options.Release = ensembl.Release;\n Options.Species = EnsemblSpecies.SelectedItem.ToString();\n Options.Reference = ensembl.Genomes[Options.Species];\n Options.Organism = ensembl.Organisms[Options.Species];\n Options.AnalyzeVariants = (bool)Cb_AnalyzeVariants.IsChecked;\n Options.AnalyzeIsoforms = (bool)Cb_AnalyzeIsoforms.IsChecked;\n Options.SpritzVersion = MainWindow.CurrentVersion;\n DialogResult = true;\n }\n\n private void UpdateFieldsFromTask(Options options)\n {\n // Get information about the fastq and sra selections\n var rnaSeqFastqCollection = (ObservableCollection<RNASeqFastqDataGrid>)MainWindow.DataGridRnaSeqFastq.DataContext;\n Options.Fastq1 = string.Join(\",\", rnaSeqFastqCollection.Where(p => p.MatePair == 1.ToString()).OrderBy(p => p.FileName).Select(p => p.FileName.Substring(0, p.FileName.Length - 2)).ToArray());\n Options.Fastq2 = string.Join(\",\", rnaSeqFastqCollection.Where(p => p.MatePair == 2.ToString()).OrderBy(p => p.FileName).Select(p => p.FileName.Substring(0, p.FileName.Length - 2)).ToArray());\n\n //use RNAFastqCollection instead\n var fq1s = Options.Fastq1.Split(',') ?? new string[0];\n var fq2s = Options.Fastq2.Split(',') ?? new string[0];\n\n foreach (string fq1 in fq1s)\n {\n if (!fq2s.Any(fq2 => fq2.CompareTo(fq1) == 0))\n {\n MessageBox.Show(\"Only paired end sequencing is supported. Add both paired files for \" + fq1 + \".\", \"Run Workflows\", MessageBoxButton.OK, MessageBoxImage.Information);\n throw new InvalidOperationException();\n }\n }\n\n //Options.ExperimentType = CmbxExperimentType.SelectedItem.ToString();\n var sraCollection = (ObservableCollection<SRADataGrid>)MainWindow.LbxSRAs.ItemsSource;\n Options.SraAccession = string.Join(\",\", sraCollection.Select(p => p.Name).ToArray());\n if (Options.SraAccession.Count() == 0 && options.Fastq1.Count() == 0)\n {\n Cb_AnalyzeIsoforms.IsChecked = false;\n Cb_AnalyzeIsoforms.IsEnabled = false;\n Cb_AnalyzeVariants.IsChecked = false;\n Cb_AnalyzeVariants.IsEnabled = false;\n }\n\n txtAnalysisDirectory.Text = AnalysisDirectory;\n txtThreads.Text = MainWindow.DockerCPUs.ToString();\n Threads = MainWindow.DockerCPUs;\n Lb_ThreadInfo.Content = $\"Integer between 1 and {MainWindow.DockerCPUs};\\nmaximum is set in Docker Desktop\";\n saveButton.IsEnabled = false;\n }\n\n private string TrimQuotesOrNull(string a)\n {\n return a == null ? 
a : a.Trim('\"');\n }\n\n private void PopulateChoices()\n {\n //CmbxExperimentType.Items.Add(ExperimentType.RNASequencing.ToString());\n //CmbxExperimentType.Items.Add(ExperimentType.WholeGenomeSequencing.ToString());\n //CmbxExperimentType.Items.Add(ExperimentType.ExomeSequencing.ToString());\n //CmbxExperimentType.SelectedIndex = 0; // hard coded selection (for now)\n\n EnsemblReleases = EnsemblRelease.GetReleases();\n }\n\n private void Species_SelectionChanged(object sender, System.Windows.Controls.SelectionChangedEventArgs e)\n {\n saveButton.IsEnabled = true;\n\n // get selection from species\n var selectedEnsembl = (EnsemblRelease)EnsemblReleaseVersions.SelectedItem;\n var selectedSpecies = (string)EnsemblSpecies.SelectedItem;\n Reference = selectedEnsembl.Genomes[selectedSpecies];\n }\n\n private void txtThreads_LostFocus(object sender, RoutedEventArgs e)\n {\n if (int.TryParse(txtThreads.Text, out int threads) && threads <= MainWindow.DockerCPUs && threads > 0)\n {\n Threads = threads;\n }\n else\n {\n txtThreads.Text = MainWindow.DockerCPUs.ToString();\n }\n }\n }\n}" }, { "alpha_fraction": 0.5710553526878357, "alphanum_fraction": 0.5891675353050232, "avg_line_length": 44.57143020629883, "blob_id": "fd12c2a30e9d08efb2e6166c65fb4d3928d8e749", "content_id": "cce33b89e23277fbaa77d7c2b86d132c7422bc57", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5742, "license_type": "permissive", "max_line_length": 180, "num_lines": 126, "path": "/Spritz/rules/align.smk", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "import os\n\nREF=config[\"species\"] + \".\" + config[\"genome\"]\n\nrule directories:\n output: directory(\"data/ensembl/{REF}.dna.primary_assembly.karyotypic/\")\n shell: \"mkdir -p data/ensembl/{REF}.dna.primary_assembly.karyotypic/\"\n\nrule hisat_genome:\n '''Build genome index for hisat2'''\n input:\n fa=\"data/ensembl/{REF}.dna.primary_assembly.karyotypic.fa\",\n gtf=\"data/ensembl/{REF}.\" + config[\"release\"] + \".gff3\",\n threads: 12\n output:\n idx=\"data/ensembl/{REF}.dna.primary_assembly.karyotypic.1.ht2\",\n finished=\"data/ensembl/done_building_hisat_genome{REF}.txt\",\n benchmark: \"data/ensembl/{REF}.hisatbuild.benchmark\"\n log: \"data/ensembl/{REF}.hisatbuild.log\"\n shell:\n \"(hisat2-build -p {threads} data/ensembl/{REF}.dna.primary_assembly.karyotypic.fa\"\n \" data/ensembl/{REF}.dna.primary_assembly.karyotypic && touch {output.finished}) &> {log}\"\n\nrule hisat2_splice_sites:\n '''Fetch the splice sites from the gene model for hisat2'''\n input: \"data/ensembl/\" + REF + \".\" + config[\"release\"] + \".gff3\"\n output: \"data/ensembl/\" + REF + \".\" + config[\"release\"] + \".splicesites.txt\"\n shell: \"hisat2_extract_splice_sites.py {input} > {output}\"\n\ndef input_fq_args(fastqs):\n fqs=fastqs.split()\n if len(fqs) == 1:\n return f\"-U {fqs[0]}\"\n else:\n return f\"-1 {fqs[0]} -2 {fqs[1]}\"\n\ndef check_sra():\n '''Check if SRAs should be downloaded'''\n docheck = 'sra' in config and config[\"sra\"] is not None and len(config[\"sra\"]) > 0\n return docheck\n\nif check_sra():\n rule download_sras: # in the future, could use this to check SE vs PE: https://www.biostars.org/p/139422/\n '''Download fastqs from GEO SRA for quality control and alignment'''\n output:\n temp(\"{dir}/{sra,[A-Z0-9]+}_1.fastq\"), # constrain wildcards, so it doesn't soak up SRR######.trim_1.fastq\n temp(\"{dir}/{sra,[A-Z0-9]+}_2.fastq\")\n benchmark: \"{dir}/{sra}.benchmark\"\n log: 
\"{dir}/{sra}.log\"\n threads: 4\n shell:\n \"fasterq-dump -b 10MB -c 100MB -m 1000MB -p --threads {threads}\" # use 10x the default memory allocation for larger SRAs\n \" --split-files --temp {wildcards.dir} --outdir {wildcards.dir} {wildcards.sra} 2> {log}\"\nelse:\n rule expand_fastqs:\n '''Prepare compressed input fastqs for quality control'''\n input:\n fq1=\"{dir}/{fq}_1.fastq.gz\",\n fq2=\"{dir}/{fq}_2.fastq.gz\",\n output:\n fq1=temp(\"{dir}/{fq}_1.fastq\"),\n fq2=temp(\"{dir}/{fq}_2.fastq\"),\n shell: \"gunzip -k {input.fq1} && gunzip -k {input.fq2}\"\n\nrule fastp:\n '''Trim adapters, read quality filtering, make QC outputs'''\n input:\n fq1=\"{dir}/{sra}_1.fastq\" if check_sra() else \"{dir}/{fq}_1.fastq\",\n fq2=\"{dir}/{sra}_2.fastq\" if check_sra() else \"{dir}/{fq}_2.fastq\",\n output:\n fq1=\"{dir}/{sra}.trim_1.fastq.gz\" if check_sra() else \"{dir}/{fq}.trim_1.fastq.gz\",\n fq2=\"{dir}/{sra}.trim_2.fastq.gz\" if check_sra() else \"{dir}/{fq}.trim_2.fastq.gz\",\n html=\"{dir}/{sra}.trim.html\" if check_sra() else \"{dir}/{fq}.trim.html\",\n json=\"{dir}/{sra}.trim.json\" if check_sra() else \"{dir}/{fq}.trim.json\",\n threads: 6\n log: \"{dir}/{sra}.trim.log\" if check_sra() else \"{dir}/{fq}.trim.log\"\n params:\n quality=20,\n title=\"{sra}\" if check_sra() else \"{fq}\"\n shell:\n \"fastp -q {params.quality} -i {input.fq1} -I {input.fq2} -o {output.fq1} -O {output.fq2} \"\n \"-h {output.html} -j {output.json} \"\n \"-w {threads} -R {params.title} --detect_adapter_for_pe &> {log}\"\n\nrule hisat2_align_bam:\n '''Align trimmed reads'''\n input:\n \"data/ensembl/\" + REF + \".dna.primary_assembly.karyotypic.1.ht2\",\n fq1=\"{dir}/{sra}.trim_1.fastq.gz\" if check_sra() else \"{dir}/{fq}.trim_1.fastq.gz\",\n fq2=\"{dir}/{sra}.trim_2.fastq.gz\" if check_sra() else \"{dir}/{fq}.trim_2.fastq.gz\",\n ss=\"data/ensembl/\" + REF + \".\" + config[\"release\"] + \".splicesites.txt\"\n output:\n sorted=\"{dir}/align/{sra}.sorted.bam\" if check_sra() else \"{dir}/align/{fq}.sorted.bam\",\n threads: 12\n params:\n compression=\"9\",\n tempprefix=\"{dir}/align/{sra}.sorted\" if check_sra() else \"{dir}/align/{fq}.sorted\",\n log: \"{dir}/align/{sra}.hisat2.log\" if check_sra() else \"{dir}/align/{fq}.hisat2.log\"\n shell:\n \"(hisat2 -p {threads} -x data/ensembl/\" + REF + \".dna.primary_assembly.karyotypic -1 {input.fq1} -2 {input.fq2} --known-splicesite-infile {input.ss} | \" # align the suckers\n \"samtools view -h -F4 - | \" # get mapped reads only\n \"samtools sort -l {params.compression} -T {params.tempprefix} -o {output.sorted} -) 2> {log} && \" # sort them\n \"samtools index {output}\"\n\nrule hisat2_merge_bams:\n '''Merge the BAM files for each sample'''\n input:\n bams=expand(\"{{dir}}/align/{sra}.sorted.bam\", sra=config[\"sra\"]) if check_sra() else expand(\"{{dir}}/align/{fq}.sorted.bam\", fq=config[\"fq\"])\n output:\n sorted=\"{dir}/align/combined.sorted.bam\",\n stats=\"{dir}/align/combined.sorted.stats\"\n params:\n compression=\"9\",\n tempprefix=\"{dir}/align/combined.sorted\"\n log: \"{dir}/align/combined.sorted.log\"\n threads: 12\n resources: mem_mb=16000\n shell:\n \"(ls {input.bams} | \"\n \"{{ read firstbam; \"\n \"samtools view -h \"\"$firstbam\"\"; \"\n \"while read bam; do samtools view \"\"$bam\"\"; done; }} | \"\n \"samtools view -ubS - | \"\n \"samtools sort -@ {threads} -l {params.compression} -T {params.tempprefix} -o {output.sorted} - && \"\n \"samtools index {output.sorted} && \"\n \"samtools flagstat -@ {threads} {output.sorted} > {output.stats}) 2> 
{log}\"\n" }, { "alpha_fraction": 0.5198556184768677, "alphanum_fraction": 0.5198556184768677, "avg_line_length": 18.85714340209961, "blob_id": "3ded13380a59c9e537a2a00af813400bf8a177d3", "content_id": "2022d2900e00ad29ad0d4410fe305624376d0b1e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C#", "length_bytes": 279, "license_type": "permissive", "max_line_length": 43, "num_lines": 14, "path": "/Spritz/GUI/DataGrids/SRADataGrid.cs", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "namespace Spritz\n{\n internal class SRADataGrid\n {\n public SRADataGrid(string name)\n {\n Name = name;\n }\n\n public string Name { get; set; }\n public string State { get; set; }\n public int Completion { get; set; }\n }\n}" }, { "alpha_fraction": 0.560664713382721, "alphanum_fraction": 0.5631687045097351, "avg_line_length": 40.76045608520508, "blob_id": "efeb7d5a75543d1987c5340bc9a37d4727e48244", "content_id": "7cde8be8a6d219b2f2e83b5d216246d25d9227c0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C#", "length_bytes": 21967, "license_type": "permissive", "max_line_length": 314, "num_lines": 526, "path": "/Spritz/GUI/MainWindow.xaml.cs", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "using System;\nusing System.Collections.ObjectModel;\nusing System.Diagnostics;\nusing System.IO;\nusing System.Linq;\nusing System.Text.RegularExpressions;\nusing System.Threading.Tasks;\nusing System.Windows;\nusing System.Windows.Controls;\nusing System.Windows.Input;\nusing System.Windows.Threading;\n\nnamespace Spritz\n{\n /// <summary>\n /// Interaction logic for MainWindow.xaml\n /// </summary>\n public partial class MainWindow : Window\n {\n public static readonly string CurrentVersion = \"0.2.0\";\n\n private readonly ObservableCollection<RNASeqFastqDataGrid> RnaSeqFastqCollection = new ObservableCollection<RNASeqFastqDataGrid>();\n private ObservableCollection<InRunTask> DynamicTasksObservableCollection = new ObservableCollection<InRunTask>();\n private readonly ObservableCollection<PreRunTask> StaticTasksObservableCollection = new ObservableCollection<PreRunTask>();\n private readonly ObservableCollection<SRADataGrid> SraCollection = new ObservableCollection<SRADataGrid>();\n private EverythingRunnerEngine Everything;\n private Regex outputScrub = new Regex(@\"(\\[\\d+m)\");\n\n public int DockerCPUs { get; set; }\n public double DockerMemory { get; set; }\n private string DockerImage { get; set; } = \"smithlab/spritz\";\n private string DockerStdOut { get; set; }\n private bool ShowStdOut { get; set; } = true;\n private string DockerSystemInfo { get; set; }\n private bool IsRunning { get; set; }\n\n public MainWindow()\n {\n InitializeComponent();\n DataGridRnaSeqFastq.DataContext = RnaSeqFastqCollection;\n WorkflowTreeView.DataContext = StaticTasksObservableCollection;\n LbxSRAs.ItemsSource = SraCollection;\n\n // Version information\n try\n {\n SpritzUpdater.GetVersionNumbersFromWeb();\n }\n catch (Exception e)\n {\n MessageBox.Show(\"Could not get newest version from web: \" + e.Message, \"Setup\", MessageBoxButton.OK, MessageBoxImage.Warning);\n }\n\n // Check Docker setup\n Dispatcher.Invoke(() =>\n {\n Process proc = new Process();\n proc.StartInfo.FileName = \"Powershell.exe\";\n proc.StartInfo.Arguments = \"docker system info\";\n proc.StartInfo.UseShellExecute = false;\n proc.StartInfo.RedirectStandardOutput = true;\n proc.StartInfo.RedirectStandardError = true;\n 
proc.StartInfo.CreateNoWindow = true;\n proc.Start();\n StreamReader outputReader = proc.StandardOutput;\n DockerSystemInfo = outputReader.ReadToEnd();\n proc.WaitForExit();\n });\n bool isDockerInstalled = !string.IsNullOrEmpty(DockerSystemInfo);\n if (isDockerInstalled)\n {\n ParseDockerSystemInfo(DockerSystemInfo);\n }\n string message = isDockerInstalled ?\n \"In Docker Desktop, please ensure all shared drives are enabled, and please ensure a Disk image size of at least 80 GB is enabled.\" :\n \"Docker is not installed. Please have Docker Desktop installed, enable all shared drives, and ensure a Disk image size of at least 80 GB is enabled.\";\n if (isDockerInstalled && DockerMemory < 16)\n {\n message += $\"{Environment.NewLine}{Environment.NewLine}The memory allocated to Docker is low ({DockerMemory}GB). Please raise this value above 16 GB in Docker Desktop if possible.\";\n }\n MessageBox.Show(message, \"Setup\", MessageBoxButton.OK, isDockerInstalled ? MessageBoxImage.Information : MessageBoxImage.Error);\n }\n\n private void ParseDockerSystemInfo(string dockerSystemInfo)\n {\n string[] infoLines = dockerSystemInfo.Split('\\n');\n string cpuLine = infoLines.FirstOrDefault(line => line.Trim().StartsWith(\"CPUs\"));\n if (int.TryParse(cpuLine.Split(':')[1].Trim(), out int dockerThreads))\n {\n DockerCPUs = dockerThreads;\n }\n\n double gibToGbConversion = 1.07374;\n string memoryLine = infoLines.FirstOrDefault(line => line.Trim().StartsWith(\"Total Memory\"));\n if (double.TryParse(memoryLine.Split(':')[1].Replace(\"GiB\", \"\").Trim(), out double memoryGB))\n {\n DockerMemory = memoryGB * gibToGbConversion;\n }\n }\n\n protected override void OnClosed(EventArgs e)\n {\n string message = \"Are you sure you would like to exit Spritz?\";\n message += IsRunning ? 
\" This will stop all Spritz processes, which may take a few moments.\" : \"\";\n if (MessageBox.Show(message, \"Exit Spritz\", MessageBoxButton.OKCancel) == MessageBoxResult.Cancel)\n return;\n StopDocker(\"stop\"); // may need to kill processes if we see that they get stuck in the future, but killing leaves some files open, which makes them hard to delete\n base.OnClosed(e);\n }\n\n private void CancelTasksButton_Click(object sender, RoutedEventArgs e)\n {\n StopDocker(\"stop\");\n }\n\n private void StopDocker(string command)\n {\n // new process that kills docker container (if any)\n if (Everything != null && !string.IsNullOrEmpty(Everything.PathToWorkflow))\n {\n Process proc = new Process();\n proc.StartInfo.FileName = \"Powershell.exe\";\n proc.StartInfo.Arguments = $\"docker {command} {Everything.SpritzContainerName}\";\n proc.StartInfo.CreateNoWindow = true;\n proc.StartInfo.UseShellExecute = false;\n proc.Start();\n\n if (proc != null && !proc.HasExited)\n {\n proc.WaitForExit();\n }\n }\n }\n\n private void UpdateSRABox()\n {\n if (RnaSeqFastqCollection.Count > 0)\n {\n TbxSRA.IsEnabled = false;\n BtnAddSRA.IsEnabled = false;\n BtnClearSRA.IsEnabled = false;\n }\n else\n {\n TbxSRA.IsEnabled = true;\n BtnAddSRA.IsEnabled = true;\n BtnClearSRA.IsEnabled = true;\n }\n }\n\n private void Window_Drop(object sender, DragEventArgs e)\n {\n string[] files = (string[])e.Data.GetData(DataFormats.FileDrop);\n if (files != null)\n {\n foreach (var draggedFilePath in files)\n {\n if (Directory.Exists(draggedFilePath))\n {\n foreach (string file in Directory.EnumerateFiles(draggedFilePath, \"*.*\", SearchOption.AllDirectories))\n {\n AddAFile(file);\n }\n }\n else\n {\n AddAFile(draggedFilePath);\n }\n DataGridRnaSeqFastq.Items.Refresh();\n }\n }\n UpdateOutputFolderTextbox();\n UpdateSRABox();\n }\n\n private void Window_Loaded(object sender, RoutedEventArgs e)\n {\n if (SpritzUpdater.NewestKnownVersion != null && !SpritzUpdater.IsVersionLower(SpritzUpdater.NewestKnownVersion))\n {\n try\n {\n SpritzUpdater newwind = new SpritzUpdater();\n newwind.ShowDialog();\n }\n catch (Exception ex)\n {\n MessageBox.Show(ex.ToString());\n }\n }\n }\n\n private void MenuItem_Wiki_Click(object sender, RoutedEventArgs e)\n {\n Process.Start(@\"https://github.com/smith-chem-wisc/Spritz/wiki\");\n }\n\n private void MenuItem_Contact_Click(object sender, RoutedEventArgs e)\n {\n Process.Start(@\"https://github.com/smith-chem-wisc/Spritz\");\n }\n\n private void RunWorkflowButton_Click(object sender, RoutedEventArgs e)\n {\n try\n {\n if (StaticTasksObservableCollection.Count == 0)\n {\n MessageBox.Show(\"You must add a workflow before a run.\", \"Run Workflows\", MessageBoxButton.OK, MessageBoxImage.Information);\n return;\n }\n else if (RnaSeqFastqCollection.Any() && GetPathToFastqs().CompareTo(OutputFolderTextBox.Text) != 0) // to be edited\n {\n MessageBox.Show(\"FASTQ files do not exist in the user-defined analysis directory.\", \"Run Workflows\", MessageBoxButton.OK, MessageBoxImage.Information);\n return;\n }\n\n DynamicTasksObservableCollection = new ObservableCollection<InRunTask>();\n DynamicTasksObservableCollection.Add(new InRunTask(\"Workflow 1\", StaticTasksObservableCollection.First().options));\n WorkflowTreeView.DataContext = DynamicTasksObservableCollection;\n\n Everything = new EverythingRunnerEngine(DynamicTasksObservableCollection.Select(b => new Tuple<string, Options>(b.DisplayName, b.options)).First(), OutputFolderTextBox.Text);\n\n InformationTextBox.Document.Blocks.Clear();\n 
InformationTextBox.AppendText($\"Command executing: Powershell.exe {Everything.GenerateCommandsDry(DockerImage)}\\n\\n\"); // keep for debugging\n InformationTextBox.AppendText($\"Saving output to {Everything.PathToWorkflow}. Please monitor it there...\\n\\n\");\n\n IsRunning = true;\n Everything.WriteConfig(StaticTasksObservableCollection.First().options);\n var t = new Task(RunEverythingRunner);\n t.Start();\n t.ContinueWith(DisplayAnyErrors);\n\n // update gui\n RunWorkflowButton.IsEnabled = false;\n ClearTasksButton.IsEnabled = true;\n BtnWorkFlow.IsEnabled = false;\n ResetTasksButton.IsEnabled = true;\n }\n catch (TaskCanceledException)\n {\n // Ignore error\n }\n }\n\n private void RunEverythingRunner()\n {\n Process proc = new Process();\n proc.StartInfo.FileName = \"Powershell.exe\";\n proc.StartInfo.Arguments = Everything.GenerateCommandsDry(DockerImage);\n proc.StartInfo.UseShellExecute = false;\n proc.StartInfo.RedirectStandardOutput = true;\n proc.StartInfo.RedirectStandardError = true;\n proc.StartInfo.CreateNoWindow = true;\n proc.OutputDataReceived += new DataReceivedEventHandler(OutputHandler);\n proc.ErrorDataReceived += new DataReceivedEventHandler(OutputHandler);\n proc.Start();\n proc.BeginOutputReadLine();\n proc.BeginErrorReadLine();\n proc.WaitForExit();\n }\n\n private void OutputHandler(object source, DataReceivedEventArgs e)\n {\n Dispatcher.Invoke(() =>\n {\n if (!string.IsNullOrEmpty(e.Data))\n {\n string output = outputScrub.Replace(e.Data, \"\");\n DockerStdOut += output + Environment.NewLine;\n if (ShowStdOut)\n {\n lock (InformationTextBox)\n InformationTextBox.AppendText(output + Environment.NewLine);\n }\n using (StreamWriter sw = File.Exists(Everything.PathToWorkflow) ? File.AppendText(Everything.PathToWorkflow) : File.CreateText(Everything.PathToWorkflow))\n {\n sw.WriteLine(output);\n }\n }\n });\n }\n\n private void DisplayAnyErrors(Task obj)\n {\n Dispatcher.Invoke(() => InformationTextBox.AppendText(\"Done!\" + Environment.NewLine));\n if (StaticTasksObservableCollection.Count > 0)\n {\n Dispatcher.Invoke(() => MessageBox.Show(\"Finished! 
Workflow summary is located in \"\n + StaticTasksObservableCollection.First().options.AnalysisDirectory, \"Spritz Workflow\",\n MessageBoxButton.OK, MessageBoxImage.Information));\n }\n IsRunning = false;\n }\n\n private void BtnAddRnaSeqFastq_Click(object sender, RoutedEventArgs e)\n {\n Microsoft.Win32.OpenFileDialog openPicker = new Microsoft.Win32.OpenFileDialog()\n {\n Filter = \"FASTQ Files|*.fastq\",\n FilterIndex = 1,\n RestoreDirectory = true,\n Multiselect = true\n };\n if (openPicker.ShowDialog() == true)\n {\n foreach (var filepath in openPicker.FileNames)\n {\n AddAFile(filepath);\n }\n }\n DataGridRnaSeqFastq.Items.Refresh();\n UpdateSRABox();\n }\n\n private void BtnClearRnaSeqFastq_Click(object sender, RoutedEventArgs e)\n {\n RnaSeqFastqCollection.Clear();\n UpdateOutputFolderTextbox();\n UpdateSRABox();\n }\n\n private void ClearTasksButton_Click(object sender, RoutedEventArgs e)\n {\n StaticTasksObservableCollection.Clear();\n WorkflowTreeView.DataContext = StaticTasksObservableCollection;\n InformationTextBox.Document.Blocks.Clear();\n UpdateTaskGuiStuff();\n }\n\n private void ResetTasksButton_Click(object sender, RoutedEventArgs e)\n {\n RunWorkflowButton.IsEnabled = true;\n ClearTasksButton.IsEnabled = true;\n BtnWorkFlow.IsEnabled = false;\n ResetTasksButton.IsEnabled = false;\n\n DynamicTasksObservableCollection.Clear();\n WorkflowTreeView.DataContext = StaticTasksObservableCollection;\n }\n\n private void BtnAddSRA_Click(object sender, RoutedEventArgs e)\n {\n if (TbxSRA.Text.Contains(\"SR\") || TbxSRA.Text.Contains(\"ER\"))\n {\n if (SraCollection.Any(s => s.Name == TbxSRA.Text.Trim()))\n {\n MessageBox.Show(\"That SRA has already been added. Please choose a new SRA accession.\", \"Workflow\", MessageBoxButton.OK, MessageBoxImage.Information);\n }\n else\n {\n SRADataGrid sraDataGrid = new SRADataGrid(TbxSRA.Text.Trim());\n SraCollection.Add(sraDataGrid);\n }\n }\n else if (MessageBox.Show(\"SRA accessions are expected to start with \\\"SR\\\" or \\\"ER\\\", such as SRX254398 or ERR315327. View the GEO SRA website?\", \"Workflow\", MessageBoxButton.YesNo, MessageBoxImage.Question, MessageBoxResult.No) == MessageBoxResult.Yes)\n {\n Process.Start(\"https://www.ncbi.nlm.nih.gov/sra\");\n }\n }\n\n private void BtnClearSRA_Click(object sender, RoutedEventArgs e)\n {\n SraCollection.Clear();\n BtnAddSRA.IsEnabled = true;\n }\n\n private void BtnWorkFlow_Click(object sender, RoutedEventArgs e)\n {\n if (SraCollection.Count == 0 && RnaSeqFastqCollection.Count == 0)\n {\n if (MessageBox.Show(\"You have not added any nucleic acid sequencing data (SRA accession or fastq files). Would you like to continue to make a protein database from the reference gene model?\", \"Workflow\", MessageBoxButton.YesNo, MessageBoxImage.Question, MessageBoxResult.No) == MessageBoxResult.No)\n {\n return;\n }\n }\n\n try\n {\n var dialog = new WorkFlowWindow(string.IsNullOrEmpty(OutputFolderTextBox.Text) ? 
new Options(DockerCPUs).AnalysisDirectory : OutputFolderTextBox.Text);\n if (dialog.ShowDialog() == true)\n {\n AddTaskToCollection(dialog.Options);\n UpdateTaskGuiStuff();\n UpdateOutputFolderTextbox();\n }\n }\n catch (InvalidOperationException)\n {\n // does not open workflow window until all fastq files are added, if any\n }\n }\n\n private void UpdateTaskGuiStuff()\n {\n if (StaticTasksObservableCollection.Count == 0)\n {\n RunWorkflowButton.IsEnabled = false;\n ClearTasksButton.IsEnabled = false;\n BtnWorkFlow.IsEnabled = true;\n ResetTasksButton.IsEnabled = false;\n }\n else\n {\n RunWorkflowButton.IsEnabled = true;\n ClearTasksButton.IsEnabled = true;\n BtnWorkFlow.IsEnabled = false;\n ResetTasksButton.IsEnabled = false;\n }\n }\n\n private void AddTaskToCollection(Options ye)\n {\n PreRunTask te = new PreRunTask(ye);\n StaticTasksObservableCollection.Add(te);\n StaticTasksObservableCollection.Last().DisplayName = \"Task\" + (StaticTasksObservableCollection.IndexOf(te) + 1);\n }\n\n private string GetPathToFastqs()\n {\n var MatchingChars =\n from len in Enumerable.Range(0, RnaSeqFastqCollection.Select(b => b.FilePath).Min(s => s.Length)).Reverse()\n let possibleMatch = RnaSeqFastqCollection.Select(b => b.FilePath).First().Substring(0, len)\n where RnaSeqFastqCollection.Select(b => b.FilePath).All(f => f.StartsWith(possibleMatch, StringComparison.Ordinal))\n select possibleMatch;\n\n return Path.Combine(Path.GetDirectoryName(MatchingChars.First()));\n }\n\n private void UpdateOutputFolderTextbox()\n {\n if (StaticTasksObservableCollection.Count > 0)\n {\n OutputFolderTextBox.Text = StaticTasksObservableCollection.First().options.AnalysisDirectory;\n }\n else if (RnaSeqFastqCollection.Any())\n {\n OutputFolderTextBox.Text = GetPathToFastqs();\n }\n else\n {\n OutputFolderTextBox.Clear();\n }\n }\n\n private void AddAFile(string filepath)\n {\n if (SraCollection.Count == 0)\n {\n var theExtension = Path.GetExtension(filepath).ToLowerInvariant();\n theExtension = theExtension == \".gz\" ? Path.GetExtension(Path.GetFileNameWithoutExtension(filepath)).ToLowerInvariant() : theExtension;\n switch (theExtension)\n {\n case \".fastq\":\n if (Path.GetFileName(filepath).Contains(\"_1\") || Path.GetFileName(filepath).Contains(\"_2\"))\n {\n RNASeqFastqDataGrid rnaSeqFastq = new RNASeqFastqDataGrid(filepath);\n RnaSeqFastqCollection.Add(rnaSeqFastq);\n UpdateOutputFolderTextbox();\n break;\n }\n else\n {\n MessageBox.Show(\"FASTQ files must have *_1.fastq and *_2.fastq extensions.\", \"Run Workflows\", MessageBoxButton.OK, MessageBoxImage.Information);\n return;\n }\n }\n }\n else\n {\n MessageBox.Show(\"User already added SRA number. 
Please only choose one input: 1) SRA accession 2) FASTQ files.\", \"Run Workflows\", MessageBoxButton.OK, MessageBoxImage.Information);\n return;\n }\n }\n\n private void WorkflowTreeView_MouseDoubleClick(object sender, MouseButtonEventArgs e)\n {\n //var a = sender as TreeView;\n //if (a.SelectedItem is PreRunTask preRunTask)\n //{\n // var workflowDialog = new WorkFlowWindow(preRunTask.options);\n // workflowDialog.ShowDialog();\n // WorkflowTreeView.Items.Refresh();\n //}\n }\n\n private void WarningsTextBox_TextChanged(object sender, TextChangedEventArgs e)\n {\n InformationTextBox.ScrollToEnd();\n }\n\n private void DockerImage_TextChanged(object sender, TextChangedEventArgs e)\n {\n DockerImage = tb_DockerImage.Text;\n }\n\n private void ShowTopButton_Click(object sender, RoutedEventArgs e)\n {\n ShowStdOut = false;\n Dispatcher.Invoke(() =>\n {\n InformationTextBox.Document.Blocks.Clear();\n\n Process proc = new Process();\n proc.StartInfo.FileName = \"Powershell.exe\";\n proc.StartInfo.Arguments = Everything.GenerateTopComand();\n proc.StartInfo.UseShellExecute = false;\n proc.StartInfo.RedirectStandardOutput = true;\n proc.StartInfo.CreateNoWindow = true;\n proc.Start();\n StreamReader outputReader = proc.StandardOutput;\n InformationTextBox.AppendText(outputReader.ReadToEnd());\n proc.WaitForExit();\n });\n }\n\n private void ShowOutputButton_Click(object sender, RoutedEventArgs e)\n {\n ShowStdOut = true;\n lock (InformationTextBox)\n {\n InformationTextBox.Document.Blocks.Clear();\n InformationTextBox.AppendText(DockerStdOut);\n }\n }\n }\n}" }, { "alpha_fraction": 0.6440505385398865, "alphanum_fraction": 0.6481185555458069, "avg_line_length": 50.36567306518555, "blob_id": "5a883ac730c7e3cc34b8eabd72e844dd2a6c6c86", "content_id": "b8f845a3302d215dcf0bca0c716fa7f1142186c5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6883, "license_type": "permissive", "max_line_length": 135, "num_lines": 134, "path": "/Spritz/rules/proteogenomics.smk", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "TRANSFER_MOD_DLL=\"TransferUniProtModifications/TransferUniProtModifications/bin/Release/netcoreapp2.1/TransferUniProtModifications.dll\"\nREF=config[\"species\"] + \".\" + config[\"genome\"]\n\nrule download_protein_xml:\n output:\n xml=UNIPROTXML,\n fasta=UNIPROTFASTA,\n log: UNIPROTXML + \".log\"\n shell:\n \"(python scripts/get_proteome.py && \"\n \"python scripts/download_uniprot.py xml | gzip -c > {output.xml} && \" #fixme\n \"python scripts/download_uniprot.py fasta > {output.fasta}) &> {log}\"\n\nrule build_transfer_mods:\n output: TRANSFER_MOD_DLL\n log: \"data/TransferUniProtModifications.build.log\"\n shell:\n \"(cd TransferUniProtModifications && \"\n \"dotnet restore && \"\n \"dotnet build -c Release TransferUniProtModifications.sln) &> {log}\"\n\nrule transfer_modifications_variant:\n input:\n transfermods=TRANSFER_MOD_DLL,\n unixml=UNIPROTXML,\n protxml=\"{dir}/variants/combined.spritz.snpeff.protein.xml\",\n output:\n protfastawithdecoys=\"{dir}/variants/combined.spritz.snpeff.protein.withdecoys.fasta\",\n protxmlgz=\"{dir}/variants/combined.spritz.snpeff.protein.xml.gz\",\n protxmlwithmods=temp(\"{dir}/variants/combined.spritz.snpeff.protein.withmods.xml\"),\n protxmlwithmodsgz=\"{dir}/variants/combined.spritz.snpeff.protein.withmods.xml.gz\",\n log: \"{dir}/combined.spritz.snpeff.protein.withmods.log\"\n shell:\n \"(dotnet {input.transfermods} -x {input.unixml} -y {input.protxml} && \"\n \"gzip 
-k {input.protxml} {output.protxmlwithmods}) &> {log}\"\n\nrule transfer_modifications_isoformvariant:\n input:\n transfermods=TRANSFER_MOD_DLL,\n unixml=UNIPROTXML,\n protxml=\"{dir}/variants/combined.spritz.isoformvariants.protein.xml\",\n output:\n protfastawithdecoys=\"{dir}/variants/combined.spritz.isoformvariants.protein.withdecoys.fasta\",\n protxmlgz=\"{dir}/variants/combined.spritz.isoformvariants.protein.xml.gz\",\n protxmlwithmods=temp(\"{dir}/variants/combined.spritz.isoformvariants.protein.withmods.xml\"),\n protxmlwithmodsgz=\"{dir}/variants/combined.spritz.isoformvariants.protein.withmods.xml.gz\",\n log: \"{dir}/combined.spritz.isoformvariants.protein.withmods.log\"\n shell:\n \"(dotnet {input.transfermods} -x {input.unixml} -y {input.protxml} && \"\n \"gzip -k {output.protxmlwithmods} {input.protxml}) &> {log}\"\n\nrule generate_reference_snpeff_database:\n input:\n jar=\"SnpEff/snpEff.jar\",\n gff3=GFF3,\n pfa=\"data/ensembl/{REF}.pep.all.fa\",\n gfa=\"data/ensembl/{REF}.dna.primary_assembly.karyotypic.fa\",\n output:\n pfa=\"SnpEff/data/{REF}/protein.fa\",\n gff3=\"SnpEff/data/{REF}/genes.gff\",\n gfa=\"SnpEff/data/genomes/{REF}.fa\",\n done=\"SnpEff/data/{REF}/done{REF}.txt\",\n resources: mem_mb=16000\n benchmark: \"SnpEff/data/{REF}/snpeffdatabase.benchmark\"\n log: \"SnpEff/data/{REF}/snpeffdatabase.log\"\n shell:\n \"cp {input.gff3} {output.gff3} && \"\n \"cp {input.pfa} {output.pfa} && \"\n \"cp {input.gfa} {output.gfa} && \"\n \"echo \\\"\\n# {REF}\\\" >> SnpEff/snpEff.config && \"\n \"echo \\\"{REF}.genome : Human genome \" + GENOME_VERSION + \" using RefSeq transcripts\\\" >> SnpEff/snpEff.config && \"\n \"echo \\\"{REF}.reference : ftp://ftp.ncbi.nlm.nih.gov/refseq/H_sapiens/\\\" >> SnpEff/snpEff.config && \"\n \"echo \\\"\\t{REF}.M.codonTable : Vertebrate_Mitochondrial\\\" >> SnpEff/snpEff.config && \"\n \"echo \\\"\\t{REF}.MT.codonTable : Vertebrate_Mitochondrial\\\" >> SnpEff/snpEff.config && \"\n \"(java -Xmx{resources.mem_mb}M -jar {input.jar} build -gff3 -v {REF}) &> {log} && touch {output.done}\"\n\nrule reference_protein_xml:\n \"\"\"\n Create protein XML with sequences from the reference gene model.\n \"\"\"\n input:\n \"SnpEff/data/\" + REF + \"/done\" + REF + \".txt\",\n snpeff=\"SnpEff/snpEff.jar\",\n fa=\"data/ensembl/\" + REF + \".dna.primary_assembly.karyotypic.fa\",\n transfermods=TRANSFER_MOD_DLL,\n unixml=UNIPROTXML,\n output:\n done=\"{dir}/variants/done\" + REF + \".\" + ENSEMBL_VERSION + \".txt\",\n protxml=temp(\"{dir}/variants/\" + REF + \".\" + ENSEMBL_VERSION + \".protein.xml\"),\n protxmlgz=\"{dir}/variants/\" + REF + \".\" + ENSEMBL_VERSION + \".protein.xml.gz\",\n protfa=\"{dir}/variants/\" + REF + \".\" + ENSEMBL_VERSION + \".protein.fasta\",\n protwithdecoysfa=\"{dir}/variants/\" + REF + \".\" + ENSEMBL_VERSION + \".protein.withdecoys.fasta\",\n protxmlwithmods=temp(\"{dir}/variants/\" + REF + \".\" + ENSEMBL_VERSION + \".protein.withmods.xml\"),\n protxmlwithmodsgz=\"{dir}/variants/\" + REF + \".\" + ENSEMBL_VERSION + \".protein.withmods.xml.gz\",\n resources: mem_mb=16000\n benchmark: \"{dir}/variants/\" + REF + \".\" + ENSEMBL_VERSION + \".spritz.benchmark\"\n log: \"{dir}/variants/\" + REF + \".\" + ENSEMBL_VERSION + \".spritz.log\"\n shell:\n \"(java -Xmx{resources.mem_mb}M -jar {input.snpeff} -v -nostats\"\n \" -xmlProt {output.protxml} {REF} && \" # no isoforms, no variants\n \"dotnet {input.transfermods} -x {input.unixml} -y {output.protxml} && \"\n \"gzip -k {output.protxmlwithmods} {output.protxml}) &> {log} && touch 
{output.done}\"\n\nrule custom_protein_xml:\n \"\"\"\n Create protein XML with sequences from the isoform discovery gene model.\n \"\"\"\n input:\n snpeff=\"SnpEff/snpEff.jar\",\n fa=\"data/ensembl/\" + REF + \".dna.primary_assembly.karyotypic.fa\",\n isoform_reconstruction=[\n \"SnpEff/data/combined.transcripts.genome.gff3/genes.gff\",\n \"SnpEff/data/combined.transcripts.genome.gff3/protein.fa\",\n \"SnpEff/data/genomes/combined.transcripts.genome.gff3.fa\",\n \"SnpEff/data/combined.transcripts.genome.gff3/done.txt\"],\n transfermods=TRANSFER_MOD_DLL,\n unixml=UNIPROTXML,\n output:\n protxml=temp(\"{dir}/isoforms/combined.spritz.isoform.protein.xml\"),\n protwithdecoysfa=\"{dir}/isoforms/combined.spritz.isoform.protein.withdecoys.fasta\",\n protxmlgz=\"{dir}/isoforms/combined.spritz.isoform.protein.xml.gz\",\n protxmlwithmods=temp(\"{dir}/isoforms/combined.spritz.isoform.protein.withmods.xml\"),\n protxmlwithmodsgz=\"{dir}/isoforms/combined.spritz.isoform.protein.withmods.xml.gz\",\n protfa=\"{dir}/isoforms/combined.spritz.isoform.protein.fasta\",\n params:\n ref=\"combined.transcripts.genome.gff3\", # with isoforms\n resources: mem_mb=16000\n benchmark: \"{dir}/isoforms/combined.spritz.isoform.benchmark\"\n log: \"{dir}/isoforms/combined.spritz.isoform.log\"\n shell:\n \"(java -Xmx{resources.mem_mb}M -jar {input.snpeff} -v -nostats\"\n \" -xmlProt {output.protxml} {params.ref} < /dev/null && \" # isoforms, no variants\n \"dotnet {input.transfermods} -x {input.unixml} -y {output.protxml} && \"\n \"gzip -k {output.protxmlwithmods} {output.protxml}) &> {log}\"\n" }, { "alpha_fraction": 0.7241914868354797, "alphanum_fraction": 0.7795601487159729, "avg_line_length": 64.50847625732422, "blob_id": "5bb3845a4938403d909cc7c481737e4606db61c9", "content_id": "11c432aad288de90e3cb0461c710d4ee15e85ced", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3865, "license_type": "permissive", "max_line_length": 529, "num_lines": 59, "path": "/README.md", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "# Spritz\nSoftware for RNA-Seq analysis on Windows, including creating sample-specific proteoform databases from genomic data\n\n[![Build status](https://ci.appveyor.com/api/projects/status/p54yrm6iixqm8jsf?svg=true)](https://ci.appveyor.com/project/acesnik/spritz)\n[![Release](https://img.shields.io/github/v/release/smith-chem-wisc/Spritz)](https://github.com/smith-chem-wisc/Spritz/releases/latest)\n[![Github All Releases](https://img.shields.io/github/downloads/smith-chem-wisc/Spritz/total.svg)](https://github.com/smith-chem-wisc/Spritz/releases/)\n[![Docker Pulls](https://img.shields.io/docker/pulls/smithlab/spritz)](https://hub.docker.com/r/smithlab/spritz/tags?page=1&ordering=last_updated)\n[![Follow us on Twitter](https://img.shields.io/twitter/follow/smith_chem_wisc?label=Twitter&style=social)](https://twitter.com/smith_chem_wisc)\n\nSpritz uses snakemake and Docker to install and run commandline tools for Next-Generation Sequencing (NGS) analysis.\n\nSpritz can be downloaded [here](https://github.com/smith-chem-wisc/Spritz/releases).\n\n![image](https://user-images.githubusercontent.com/16342951/84078314-55585280-a99e-11ea-9096-bebfcbb06bef.png)\n\n## Running Spritz with GUI\n\n1. Install [Docker Desktop for Windows](https://hub.docker.com/editions/community/docker-ce-desktop-windows).\n\n2. 
Under Docker Settings, enable C and other necessary drives and allocate sufficient resources (recommended 16GB).\n\n ![settings](https://user-images.githubusercontent.com/42819128/70090841-8a937a80-15e0-11ea-9742-ca959a89deca.png)\n\n3. Launch Spritz.\n\n ![howto](https://user-images.githubusercontent.com/42819128/70091146-2624eb00-15e1-11ea-9230-bfd118aa03d9.png)\n\n Step 1: Input SRA accessions OR FASTQ files.\n\n Step 2: Create and customize your Spritz workflow.\n\n ![workflow](https://user-images.githubusercontent.com/42819128/70091992-e65f0300-15e2-11ea-9e0f-7bb4262afefa.png)\n\n Step 3: Run Spritz!\n\n## System Requirements\n\n* Environment:\n * .NET Core 3.1:\n * Windows: https://dotnet.microsoft.com/download/dotnet-core/thank-you/runtime-desktop-3.1.3-windows-x64-installer\n* 16 GB RAM recommended\n* Note that the installer (Spritz.msi) only works on Windows. \n* Spritz will also work on the commandline within a Unix system (Linux, Mac, WSL on Windows). First, install [miniconda3](https://docs.conda.io/en/latest/miniconda.html), and then create a `conda` environment for spritz by running `conda env create --name spritz --file environment.yaml; conda activate spritz`. After adapting the `config.yaml` file manually, Spritz may be run using `snakemake -j {threads} --resources mem_mb={memory_megabytes}`, where `{threads}` and `{memory_megabytes}` are replaced with your specifications.\n\n## Test it out! Try constructing the database for U2OS from the paper.\n\n1. Add SRR629563 to the SRA list.\n\n2. Create the Spritz workflow. Select \"release-82\" and \"homo_sapiens.\"\n\n3. Run Spritz!\n\nMonitor progress in the Information textbox. The final database named `final/combined.spritz.snpeff.protein.withmods.xml.gz` can be used to search MS/MS with [MetaMorpheus](https://github.com/smith-chem-wisc/MetaMorpheus) to find variant peptides and proteoforms, possibly with modifications. We recommend performing 1) Calibration, 2) Global PTM Discovery (G-PTM-D), and 3) Search tasks to get the best results.\n\n![image](https://user-images.githubusercontent.com/16342951/85874687-a76be700-b798-11ea-9bff-9f68646b03de.png)\n\nThe final database named `final/combined.spritz.snpeff.protein.fasta` is generated to contain variant protein sequences, and it may be used in other search software, such as Proteome Discoverer, ProSight, and MASH Explorer.\n\nThe final database named `final/combined.spritz.snpeff.protein.withdecoys.fasta` is ready for use in MSFragger. 
It is generated to contain variant protein sequences with decoy protein sequences appended.\n" }, { "alpha_fraction": 0.6453403234481812, "alphanum_fraction": 0.6541045904159546, "avg_line_length": 44.03947448730469, "blob_id": "f0232442f4388b75fe8101c7f1d2616cedbe1bc9", "content_id": "515633234cd416865e6d8d97c56123d81459d9d6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3423, "license_type": "permissive", "max_line_length": 107, "num_lines": 76, "path": "/Spritz/Snakefile", "repo_name": "trishorts/Spritz", "src_encoding": "UTF-8", "text": "configfile: \"config.yaml\"\nSPECIES = config[\"species\"]\nGENOME_VERSION = config[\"genome\"]\nENSEMBL_VERSION = config[\"release\"]\nGENEMODEL_VERSION = GENOME_VERSION + \".\" + ENSEMBL_VERSION\nGENOME_FA = f\"data/ensembl/{SPECIES}.{GENOME_VERSION}.dna.primary_assembly.fa\"\nENSEMBL_GFF = f\"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}.gff3\"\nTEST_GENOME_FA = f\"data/ensembl/202122.fa\"\nTEST_ENSEMBL_GFF = f\"data/ensembl/202122.gff3\"\nFA=GENOME_FA # for analysis; can also be TEST_GENOME_FA\nGFF3=ENSEMBL_GFF # for analysis; can also be TEST_ENSEMBL_GFF\nREFSTAR_PREFIX = f\"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}RsemStar/RsemStarReference\"\nREFSTAR_FOLDER = f\"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}RsemStar/\"\nREF_PREFIX = f\"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}Rsem/RsemReference\"\nREF_FOLDER = f\"data/ensembl/{SPECIES}.{GENEMODEL_VERSION}Rsem/\"\nREF = SPECIES + \".\" + GENOME_VERSION\nUNIPROTXML=\"data/uniprot/\" + config[\"species\"] + \".protein.xml.gz\" #\"data/Homo_sapiens_202022.xml.gz\"\nUNIPROTFASTA=\"data/uniprot/\" + config[\"species\"] + \".protein.fasta\" #\"data/Homo_sapiens_202022.xml.gz\"\n\ndef output(wildcards):\n outputs = []\n if not \"spritzversion\" in config:\n outputs = expand(\n \"{dir}/please_update_spritz.txt\",\n dir=config[\"analysisDirectory\"])\n elif len(config[\"analyses\"]) == 0:\n outputs = expand([\n \"{dir}/prose.txt\",\n \"{dir}/variants/done\" + REF + \".\" + ENSEMBL_VERSION + \".txt\"], # reference\n dir=config[\"analysisDirectory\"])\n elif \"variant\" in config[\"analyses\"] and len(config[\"analyses\"]) == 1:\n outputs = expand([\n \"{dir}/prose.txt\",\n \"{dir}/final/combined.spritz.snpeff.protein.withmods.xml.gz\", # variants\n \"{dir}/variants/done\" + REF + \".\" + ENSEMBL_VERSION + \".txt\"], # reference\n dir=config[\"analysisDirectory\"])\n elif \"isoform\" in config[\"analyses\"] and len(config[\"analyses\"]) == 1:\n outputs = expand([\n \"{dir}/prose.txt\",\n \"{dir}/final/combined.spritz.isoform.protein.withmods.xml.gz\"], # isoforms\n dir=config[\"analysisDirectory\"])\n elif \"variant\" in config[\"analyses\"] and \"isoform\" in config[\"analyses\"]:\n outputs = expand([\n \"{dir}/prose.txt\",\n \"{dir}/final/combined.spritz.snpeff.protein.withmods.xml.gz\", # variants\n \"{dir}/final/combined.spritz.isoformvariants.protein.withmods.xml.gz\", # isoform variants\n \"{dir}/final/combined.spritz.isoform.protein.withmods.xml.gz\", # isoforms\n \"{dir}/variants/done\" + REF + \".\" + ENSEMBL_VERSION + \".txt\"], # reference\n dir=config[\"analysisDirectory\"])\n return outputs\n\nrule all:\n input: output\n\nrule clean:\n shell:\n \"rm -rf data/ ChromosomeMappings/ SnpEff/ tmp/ fast.tmp/ && \"\n \"cd GtfSharp && dotnet clean && cd .. 
&& \"\n \"cd TransferUniProtModifications && dotnet clean && cd ..\"\n\nrule update_message:\n output: \"{dir}/please_update_spritz.txt\"\n shell: \"echo \\\"Please update Spritz at https://github.com/smith-chem-wisc/Spritz/releases\\\" > {output}\"\n\nrule prose:\n output: \"{dir}/prose.txt\"\n shell: \"python scripts/prose.py {output}\"\n\ninclude: \"rules/downloads.smk\"\ninclude: \"rules/align.smk\"\ninclude: \"rules/variants.smk\"\ninclude: \"rules/isoforms.smk\"\ninclude: \"rules/proteogenomics.smk\"\ninclude: \"rules/quant.smk\"\ninclude: \"rules/fusion.smk\"\ninclude: \"rules/testing.smk\"\n" } ]
10
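The Spritz record above closes with its Snakefile and README text, which together describe the command-line path: adapt config.yaml to the keys the Snakefile reads (species, genome, release, analysisDirectory, analyses, sra, fq, spritzversion) and then invoke snakemake with a thread count and memory budget. A minimal Python sketch of that step follows; only the key names, the SRR629563 test accession, the 0.2.0 version string, and the snakemake flags come from the dumped files, while the genome/release values, paths, and resource numbers are illustrative assumptions, not a definitive Spritz invocation.

    # Sketch: write a Spritz-style config.yaml and launch snakemake from Python.
    # Key names mirror config[...] lookups in the dumped Snakefile and rule files;
    # concrete values below (genome, release, directory, resources) are assumptions.
    import subprocess
    import yaml  # PyYAML

    config = {
        "species": "homo_sapiens",
        "genome": "GRCh38",                            # assumed example value
        "release": "82",                               # assumed example value
        "analysisDirectory": "/data/spritz_analysis",  # assumed path
        "analyses": ["variant"],                       # Snakefile also checks for "isoform"
        "sra": ["SRR629563"],                          # test accession from the README
        "fq": [],                                      # fastq prefixes when not using SRA
        "spritzversion": "0.2.0",
    }

    with open("config.yaml", "w") as handle:
        yaml.safe_dump(config, handle)

    # README: snakemake -j {threads} --resources mem_mb={memory_megabytes}
    subprocess.run(
        ["snakemake", "-j", "12", "--resources", "mem_mb=16000"],
        check=True,
    )

The key names are taken directly from the config[...] accesses visible in Snakefile and rules/align.smk; everything else should be adjusted to the actual analysis before use.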
MikoyChinese/Cap_tool
https://github.com/MikoyChinese/Cap_tool
c56bffffeee49c83138bb18e989623fbe4c9bc2a
c470c7c5b513acb713df591f60a94e4403aec7aa
b73957bcadaf73b50a60378830e880d9f0110272
refs/heads/master
2020-03-28T10:41:38.391274
2018-10-19T09:36:25
2018-10-19T09:36:25
148,134,948
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6093382835388184, "alphanum_fraction": 0.6167282462120056, "avg_line_length": 29.387754440307617, "blob_id": "b46837b315f07ad44ca46f231f5014b3e9953f75", "content_id": "f2bbbd0cb6145565b8dd678c2e7cf02c281b2364", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2989, "license_type": "no_license", "max_line_length": 88, "num_lines": 98, "path": "/ui/component.py", "repo_name": "MikoyChinese/Cap_tool", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nThis is the basic component for qtObject\n\"\"\"\nfrom PyQt5.QtWidgets import QLabel, QComboBox, QSizePolicy, QFrame, QWidget, QMessageBox\nfrom PyQt5.QtCore import Qt, QSize\nimport subprocess, cv2\n\n\"\"\"\n This part includes <basicLable, basicComboBox, basicQuitMsgBox, basicTool>.\n\"\"\"\n\n\nclass basicLabel(QLabel):\n\n def __init__(self, label_name=None, parent=None, width=417, height=307):\n super(basicLabel, self).__init__(parent)\n self.label_name = label_name\n self.config()\n self.setMinimumSize(QSize(int(width), int(height)))\n # Picture auto adaptation.\n self.setScaledContents(True)\n # Label Frame\n self.setFrameShape(QFrame.Box)\n self.setAlignment(Qt.AlignCenter)\n self.setObjectName(self.label_name)\n\n def config(self):\n sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(self.sizePolicy().hasHeightForWidth())\n self.setSizePolicy(sizePolicy)\n\n\nclass basicComboBox(QComboBox):\n\n def __init__(self, object_name=None, QWidget_parent=None):\n super(basicComboBox, self).__init__(QWidget_parent)\n # Itself name.\n self.object_name = object_name\n self.config()\n self.setObjectName(self.object_name)\n\n def config(self):\n sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(self.sizePolicy().hasHeightForWidth())\n self.setSizePolicy(sizePolicy)\n\n\nclass basicQuitMsgBox(QWidget):\n\n def __init__(self):\n super(basicQuitMsgBox, self).__init__()\n\n def closeEvent(self, event):\n reply = QMessageBox.question(self, 'Msg:', '确认退出吗?',\n QMessageBox.Yes | QMessageBox.No,\n QMessageBox.No)\n if reply == QMessageBox.Yes:\n event.accept()\n else:\n event.ignore()\n\n\nclass basicTool():\n \"\"\"\n This script will show all Cameras path on you pc and finally return a list.\n The availableLabel is for showing the cameras' pics I like, if you want\n display in your way, edit it anytime.\n \"\"\"\n def availableCamera(self):\n cmd = 'ls /dev/video*'\n cam_lst = subprocess.getoutput(cmd).splitlines()\n for cam in cam_lst:\n capture = cv2.VideoCapture(cam)\n if not capture.isOpened():\n cam_lst.remove(cam)\n return cam_lst\n\n def availableLabel(self, lst=list, count=int):\n if count == 2:\n lst.pop(1)\n if count == 4:\n lst.pop(1)\n lst.pop(3)\n if count == 5:\n lst.pop(4)\n return lst\n\n\nif __name__ == '__main__':\n t = basicTool()\n lst = t.availableCamera()\n print(len(lst))\n pass" }, { "alpha_fraction": 0.5325342416763306, "alphanum_fraction": 0.5409402251243591, "avg_line_length": 35.66857147216797, "blob_id": "ab3fdfc147d8d599a671449ffbdb1fba9067bc75", "content_id": "f5a760d426bdab7b847e97037000291aec93e222", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6424, "license_type": "no_license", "max_line_length": 127, "num_lines": 175, "path": 
"/cameraModule.py", "repo_name": "MikoyChinese/Cap_tool", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\"\"\"\n--------------------------------------------------------------------------------\nThis part is main for Camera, it include the class Camera, Timer, subThread\nsave_img_timer.\nCamera: a basic class such like cv2.VideoCapture().\nTimer: a Timer thread, to control your process when to start and do what.\nSave_img_Timer: a subThread to handle save img.\n--------------------------------------------------------------------------------\n\"\"\"\n\nimport cv2, os, sys, time\nimport numpy as np\nfrom loggingModule import MyLogging\nfrom PyQt5.QtGui import QImage, QPixmap\nfrom PyQt5.QtCore import QThread, QMutex, QMutexLocker, pyqtSignal\n\n\nclass Camera():\n\n def __init__(self, capture=cv2.VideoCapture, width=800, height=600,\n label=None, label_name=None):\n # Get the capture object from the MainWindow init.\n self.capture = capture\n self.capture.set(3, int(width))\n self.capture.set(4, int(height))\n self.width = width\n self.height = height\n self.label = label\n self.label_name = label_name\n self.currentFrame = np.array([])\n self.logger = MyLogging(logger_name='user').logger\n self.logger.info('Create [Capture]: %s [Label_name]: %s'\n %(self.capture, self.label_name))\n\n def getFrame(self):\n try:\n # Get frame and convert it to PixMap\n ret, img = self.capture.read()\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n # Add the hisself Cap object's label name in pic top.\n if self.label_name:\n cv2.putText(img, self.label_name,(18,56), 0, 1,\n (129,216,207), 3)\n # Get the height, width, byserPer from this img.\n height, width, bytesPer = img.shape\n # bytesPerLine = bytesPer*3\n # Convert the img to QPixmap type, because the QtLabel just\n # accept this type.\n img = QImage(img, width, height,\n QImage.Format_RGB888)\n img = QPixmap.fromImage(img)\n # Set label show img. And update it.\n self.label.setPixmap(img)\n except:\n self.logger.error('Some error happened in <cameraModule.py | '\n 'getFrame>.')\n sys.exit(0)\n\n def refresh(self):\n # 1. Create a new Thread Class and Init it.\n self.cap_timer = Camera_Timer()\n self.logger.info(\"Camera_Timer: [%s] has created.\" % self.cap_timer)\n # 2. Connect the thing let signal to do it.\n self.cap_timer.update.connect(self.getFrame)\n # 3. 
Start Thread.\n try:\n self.cap_timer.start()\n except BaseException:\n self.cap_timer.stop()\n self.cap_timer.quit()\n\n def quit(self):\n self.cap_timer.stop()\n self.cap_timer.quit()\n\n\nclass Camera_Timer(QThread):\n update = pyqtSignal()\n def __init__(self):\n super(Camera_Timer, self).__init__()\n self.stoped = False\n self.mutex = QMutex()\n\n def run(self):\n while not self.stoped:\n self.update.emit()\n time.sleep(0.27)\n\n def stop(self):\n with QMutexLocker(self.mutex):\n self.stoped = True\n\n\n def isStoped(self):\n with QMutexLocker(self.mutex):\n return self.stoped\n\n\nclass Save_img_Timer(QThread):\n\n \"\"\"\n Send_msg will emit the capture process to capWindow's textBroswer.\n Creat_dirs_msg will emit the new creating dirs to capWindow's textBroswer.\n\n :param\n parent: It is the who create or init The Save_img_Timer class.\n cap_Objects: a list of all avaliable Cameras on PC.\n time: The time of capturing 32 pics to spend.\n save_dirs: a list of to save dirs to save img by needing.\n img_names: a list of how to save img with your name.\n \"\"\"\n send_msg = pyqtSignal(str)\n creat_dirs_msg = pyqtSignal(str)\n\n def __init__(self, parent=None, cap_Objects=None, time=float):\n super(Save_img_Timer, self).__init__()\n self.parent = parent\n self.cap_Objects = cap_Objects\n self.time = time/32\n self.save_dirs = []\n self.img_names = []\n self.logger = MyLogging(logger_name='user').logger\n\n for cap_Object in self.cap_Objects:\n self.save_dirs.append(self.parent.save_path + self.parent.cvid + '/'\n + cap_Object.label_name + '/' + \\\n self.parent.direction + '/')\n\n self.img_names.append(self.parent.cvid + '_' + self.parent.char + '_' + \\\n self.parent.date + '_' + cap_Object.label_name +\\\n '_' + self.parent.direction)\n\n for save_dir in self.save_dirs:\n if not os.path.exists(save_dir):\n os.makedirs(save_dir)\n self.logger.info('Create dir [%s]' % save_dir)\n msg = 'Create dir [%s]' % save_dir\n self.creat_dirs_msg.connect(self.parent.update_textBrowser)\n self.creat_dirs_msg.emit(msg)\n\n\n self.index = 1\n self.isStop = False\n\n def run(self):\n msg = '[CVID]: %s [DIRECTION]: %s [ACTIVED]: Start.' % (self.parent.cvid, self.parent.direction)\n self.logger.info(msg)\n self.send_msg.emit(msg)\n num = len(self.cap_Objects)\n while not self.isStop:\n self.imgs = []\n self.file_names = []\n for i in range(num):\n ret, img = self.cap_Objects[i].capture.read()\n file_name = self.img_names[i] + '_' + str(self.index) + '.jpg'\n self.imgs.append(img)\n self.file_names.append(file_name)\n\n for i in range(num):\n cv2.imwrite(self.save_dirs[i] + self.file_names[i], self.imgs[i])\n\n msg = '[Cvid]: %s [Direction]: %s | ----------> %d\\n' % (self.parent.cvid, self.parent.direction, self.index)\n self.send_msg.emit(msg)\n\n self.index += 1\n time.sleep(self.time)\n if self.index > 32:\n self.isStop = True\n msg = '[CVID]: %s [DIRECTION]: %s [ACTIVED]: End.' 
% (self.parent.cvid, self.parent.direction)\n self.logger.info(msg)\n self.send_msg.emit(msg)\n self.parent.ui.cap_ok_Button.setEnabled(True)\n break\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.6325376629829407, "alphanum_fraction": 0.6673043370246887, "avg_line_length": 52.439998626708984, "blob_id": "7235b6ff0088a67331253c76d15772a0491ab991", "content_id": "7242034c978ac31f4abc3c291fccf142c3fe1f86", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12077, "license_type": "no_license", "max_line_length": 104, "num_lines": 225, "path": "/ui/capwindow.py", "repo_name": "MikoyChinese/Cap_tool", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'mainwindow.ui'\n#\n# Created by: PyQt5 UI code generator 5.9\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom ui.component import basicLabel\nimport sys\n\n\nclass Cap_MainWindow(object):\n def setupUi(self, MainWindow):\n MainWindow.setObjectName(\"MainWindow\")\n MainWindow.resize(1280, 800)\n sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())\n MainWindow.setSizePolicy(sizePolicy)\n MainWindow.setMinimumSize(QtCore.QSize(1280, 800))\n MainWindow.setSizeIncrement(QtCore.QSize(10, 10))\n self.centralWidget = QtWidgets.QWidget(MainWindow)\n self.centralWidget.setMinimumSize(QtCore.QSize(1024, 720))\n self.centralWidget.setObjectName(\"centralWidget\")\n self.gridLayout = QtWidgets.QGridLayout(self.centralWidget)\n self.gridLayout.setContentsMargins(9, 0, 11, 11)\n self.gridLayout.setSpacing(6)\n self.gridLayout.setObjectName(\"gridLayout\")\n self.gridLayout_3 = QtWidgets.QGridLayout()\n self.gridLayout_3.setContentsMargins(11, 11, 11, 9)\n self.gridLayout_3.setSpacing(6)\n self.gridLayout_3.setObjectName(\"gridLayout_3\")\n self.cap_next_Button = QtWidgets.QPushButton(self.centralWidget)\n self.cap_next_Button.setObjectName(\"cap_next_Button\")\n self.gridLayout_3.addWidget(self.cap_next_Button, 3, 1, 1, 1)\n self.label_date = QtWidgets.QLabel(self.centralWidget)\n self.label_date.setMinimumSize(QtCore.QSize(113, 32))\n self.label_date.setAlignment(QtCore.Qt.AlignCenter)\n self.label_date.setObjectName(\"label_date\")\n self.gridLayout_3.addWidget(self.label_date, 0, 2, 1, 1)\n self.label_direction = QtWidgets.QLabel(self.centralWidget)\n self.label_direction.setMinimumSize(QtCore.QSize(113, 32))\n self.label_direction.setAlignment(QtCore.Qt.AlignCenter)\n self.label_direction.setObjectName(\"label_direction\")\n self.gridLayout_3.addWidget(self.label_direction, 1, 2, 1, 1)\n self.lineEdit_4 = QtWidgets.QLineEdit(self.centralWidget)\n self.lineEdit_4.setMinimumSize(QtCore.QSize(198, 32))\n self.lineEdit_4.setObjectName(\"lineEdit_4\")\n self.lineEdit_4.setText('1')\n self.gridLayout_3.addWidget(self.lineEdit_4, 1, 3, 1, 1)\n self.lineEdit_2 = QtWidgets.QLineEdit(self.centralWidget)\n self.lineEdit_2.setMinimumSize(QtCore.QSize(199, 32))\n self.lineEdit_2.setObjectName(\"lineEdit_2\")\n self.lineEdit_2.setText('0')\n self.gridLayout_3.addWidget(self.lineEdit_2, 1, 1, 1, 1)\n self.lineEdit = QtWidgets.QLineEdit(self.centralWidget)\n self.lineEdit.setMinimumSize(QtCore.QSize(199, 32))\n self.lineEdit.setObjectName(\"lineEdit\")\n self.gridLayout_3.addWidget(self.lineEdit, 0, 1, 1, 1)\n 
self.lineEdit_3 = QtWidgets.QLineEdit(self.centralWidget)\n self.lineEdit_3.setMinimumSize(QtCore.QSize(198, 32))\n self.lineEdit_3.setObjectName(\"lineEdit_3\")\n self.lineEdit_3.setText('201809')\n self.gridLayout_3.addWidget(self.lineEdit_3, 0, 3, 1, 1)\n self.lineEdit_5 = QtWidgets.QLineEdit(self.centralWidget)\n self.lineEdit_5.setMinimumSize(QtCore.QSize(318, 32))\n self.lineEdit_5.setObjectName(\"lineEdit_5\")\n self.gridLayout_3.addWidget(self.lineEdit_5, 2, 1, 1, 2)\n self.label_char = QtWidgets.QLabel(self.centralWidget)\n self.label_char.setMinimumSize(QtCore.QSize(96, 32))\n self.label_char.setAlignment(QtCore.Qt.AlignCenter)\n self.label_char.setObjectName(\"label_char\")\n self.gridLayout_3.addWidget(self.label_char, 1, 0, 1, 1)\n self.label_11 = QtWidgets.QLabel(self.centralWidget)\n self.label_11.setMinimumSize(QtCore.QSize(96, 32))\n self.label_11.setAlignment(QtCore.Qt.AlignCenter)\n self.label_11.setObjectName(\"label_11\")\n self.gridLayout_3.addWidget(self.label_11, 2, 0, 1, 1)\n self.cap_ok_Button = QtWidgets.QPushButton(self.centralWidget)\n self.cap_ok_Button.setObjectName(\"cap_ok_Button\")\n self.gridLayout_3.addWidget(self.cap_ok_Button, 3, 2, 1, 2)\n self.toolButton = QtWidgets.QToolButton(self.centralWidget)\n self.toolButton.setIconSize(QtCore.QSize(63, 21))\n self.toolButton.setArrowType(QtCore.Qt.DownArrow)\n self.toolButton.setObjectName(\"toolButton\")\n self.gridLayout_3.addWidget(self.toolButton, 2, 3, 1, 1)\n self.label_cvid = QtWidgets.QLabel(self.centralWidget)\n self.label_cvid.setMinimumSize(QtCore.QSize(96, 32))\n self.label_cvid.setAlignment(QtCore.Qt.AlignCenter)\n self.label_cvid.setObjectName(\"label_cvid\")\n self.gridLayout_3.addWidget(self.label_cvid, 0, 0, 1, 1)\n self.cap_quit_Button = QtWidgets.QPushButton(self.centralWidget)\n self.cap_quit_Button.setMinimumSize(QtCore.QSize(96, 32))\n self.cap_quit_Button.setObjectName(\"cap_quit_Button\")\n self.gridLayout_3.addWidget(self.cap_quit_Button, 3, 0, 1, 1)\n self.textBrowser = QtWidgets.QTextBrowser(self.centralWidget)\n self.textBrowser.setMinimumSize(QtCore.QSize(630, 148))\n self.textBrowser.setFrameShape(QtWidgets.QFrame.WinPanel)\n self.textBrowser.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.textBrowser.setLineWidth(1)\n self.textBrowser.setMidLineWidth(0)\n self.textBrowser.setObjectName(\"textBrowser\")\n self.gridLayout_3.addWidget(self.textBrowser, 0, 4, 4, 1)\n self.gridLayout_3.setColumnStretch(0, 96)\n self.gridLayout_3.setColumnStretch(1, 199)\n self.gridLayout_3.setColumnStretch(2, 96)\n self.gridLayout_3.setColumnStretch(3, 199)\n self.gridLayout_3.setColumnStretch(4, 630)\n self.gridLayout.addLayout(self.gridLayout_3, 1, 0, 1, 1)\n self.verticalLayout = QtWidgets.QVBoxLayout()\n self.verticalLayout.setContentsMargins(11, 11, 11, 11)\n self.verticalLayout.setSpacing(6)\n self.verticalLayout.setObjectName(\"verticalLayout\")\n self.gridLayout_2 = QtWidgets.QGridLayout()\n self.gridLayout_2.setContentsMargins(5, 5, 5, 5)\n self.gridLayout_2.setHorizontalSpacing(4)\n self.gridLayout_2.setVerticalSpacing(3)\n self.gridLayout_2.setObjectName(\"gridLayout_2\")\n\n self.label_1 = basicLabel(label_name='label_1',\n parent=self.centralWidget, width=414,\n height=259)\n self.label_2 = basicLabel(label_name='label_2',\n parent=self.centralWidget, width=414,\n height=259)\n self.label_3 = basicLabel(label_name='label_3',\n parent=self.centralWidget, width=414,\n height=259)\n\n self.label_4 = basicLabel(label_name='label_4',\n parent=self.centralWidget, width=414,\n height=260)\n 
self.label_5 = basicLabel(label_name='label_5',\n parent=self.centralWidget, width=414,\n height=260)\n self.label_6 = basicLabel(label_name='label_6',\n parent=self.centralWidget, width=414,\n height=260)\n\n self.gridLayout_2.addWidget(self.label_1, 1, 0, 1, 1)\n self.gridLayout_2.addWidget(self.label_2, 1, 1, 1, 1)\n self.gridLayout_2.addWidget(self.label_3, 1, 2, 1, 1)\n self.gridLayout_2.addWidget(self.label_4, 2, 0, 1, 1)\n self.gridLayout_2.addWidget(self.label_5, 2, 1, 1, 1)\n self.gridLayout_2.addWidget(self.label_6, 2, 2, 1, 1)\n\n\n self.label_10 = QtWidgets.QLabel(self.centralWidget)\n self.label_10.setObjectName(\"label_10\")\n self.gridLayout_2.addWidget(self.label_10, 0, 1, 1, 1, QtCore.Qt.AlignHCenter)\n self.verticalLayout.addLayout(self.gridLayout_2)\n self.gridLayout.addLayout(self.verticalLayout, 0, 0, 1, 1)\n MainWindow.setCentralWidget(self.centralWidget)\n self.menuBar = QtWidgets.QMenuBar(MainWindow)\n self.menuBar.setGeometry(QtCore.QRect(0, 0, 1280, 31))\n self.menuBar.setObjectName(\"menuBar\")\n self.menuCap_Tool = QtWidgets.QMenu(self.menuBar)\n self.menuCap_Tool.setObjectName(\"menuCap_Tool\")\n self.menuInfo = QtWidgets.QMenu(self.menuCap_Tool)\n self.menuInfo.setObjectName(\"menuInfo\")\n MainWindow.setMenuBar(self.menuBar)\n self.action_Author = QtWidgets.QAction(MainWindow)\n self.action_Author.setObjectName(\"action_Author\")\n self.actionEmail = QtWidgets.QAction(MainWindow)\n self.actionEmail.setObjectName(\"actionEmail\")\n self.actionHelp = QtWidgets.QAction(MainWindow)\n self.actionHelp.setCheckable(False)\n self.actionHelp.setObjectName(\"actionHelp\")\n self.menuInfo.addAction(self.action_Author)\n self.menuInfo.addAction(self.actionEmail)\n self.menuCap_Tool.addAction(self.menuInfo.menuAction())\n self.menuCap_Tool.addAction(self.actionHelp)\n self.menuBar.addAction(self.menuCap_Tool.menuAction())\n\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n MainWindow.setTabOrder(self.lineEdit, self.lineEdit_3)\n MainWindow.setTabOrder(self.lineEdit_3, self.lineEdit_2)\n MainWindow.setTabOrder(self.lineEdit_2, self.lineEdit_4)\n MainWindow.setTabOrder(self.lineEdit_4, self.lineEdit_5)\n MainWindow.setTabOrder(self.lineEdit_5, self.toolButton)\n MainWindow.setTabOrder(self.toolButton, self.cap_ok_Button)\n MainWindow.setTabOrder(self.cap_ok_Button, self.cap_quit_Button)\n MainWindow.setTabOrder(self.cap_quit_Button, self.cap_next_Button)\n MainWindow.setTabOrder(self.cap_next_Button, self.textBrowser)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate(\"MainWindow\", \"MainWindow\"))\n self.cap_next_Button.setText(_translate(\"MainWindow\", \"Next[转变方向]\"))\n self.label_date.setText(_translate(\"MainWindow\", \"Date[生产日期]\"))\n self.label_direction.setText(_translate(\"MainWindow\", \"Direction[方向]\"))\n self.label_char.setText(_translate(\"MainWindow\", \"Char[形态]\"))\n self.label_11.setText(_translate(\"MainWindow\", \"存储目录\"))\n self.cap_ok_Button.setText(_translate(\"MainWindow\", \"Capture[开始拍照]\"))\n self.toolButton.setText(_translate(\"MainWindow\", \"...\"))\n self.label_cvid.setText(_translate(\"MainWindow\", \"Cvid[商品名]\"))\n self.cap_quit_Button.setText(_translate(\"MainWindow\", \"设置速度\"))\n\n self.label_1.setText(_translate(\"MainWindow\", \"cap1\"))\n self.label_2.setText(_translate(\"MainWindow\", \"cap2\"))\n self.label_3.setText(_translate(\"MainWindow\", \"cap3\"))\n self.label_4.setText(_translate(\"MainWindow\", \"Cap4\"))\n 
self.label_5.setText(_translate(\"MainWindow\", \"cap5\"))\n self.label_6.setText(_translate(\"MainWindow\", \"cap6\"))\n self.label_10.setText(_translate(\"MainWindow\", \"Cap_Tool : v1.0\"))\n\n self.menuCap_Tool.setTitle(_translate(\"MainWindow\", \"Cap Tool\"))\n self.menuInfo.setTitle(_translate(\"MainWindow\", \"Info\"))\n self.action_Author.setText(_translate(\"MainWindow\", \"__Author__: Mikoy\"))\n self.actionEmail.setText(_translate(\"MainWindow\", \"Email: [email protected]\"))\n self.actionHelp.setText(_translate(\"MainWindow\", \"Help\"))\n\n\nif __name__ == '__main__':\n\n app = QtWidgets.QApplication(sys.argv)\n mainWindow = QtWidgets.QMainWindow()\n ui = Cap_MainWindow()\n ui.setupUi(mainWindow)\n mainWindow.show()\n sys.exit(app.exec_())" }, { "alpha_fraction": 0.5667039155960083, "alphanum_fraction": 0.575419008731842, "avg_line_length": 33.28352355957031, "blob_id": "43c3e86999762349293de179703f0c6f9d1feef8", "content_id": "cb5e5df5337a60cbc63f0dfef808bbb3a1e818e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8990, "license_type": "no_license", "max_line_length": 106, "num_lines": 261, "path": "/cap_tool.py", "repo_name": "MikoyChinese/Cap_tool", "src_encoding": "UTF-8", "text": "\n\"\"\"\nThis file uses PyQt5 to design a Application to Capture photoes.\n\n\"\"\"\n\nimport cv2, sys, os\nfrom PyQt5 import QtWidgets\nfrom ui.mainwindow import Ui_MainWindow\nfrom ui.component import basicTool\nfrom ui.capwindow import Cap_MainWindow\nfrom cameraModule import Camera, Save_img_Timer\nfrom PyQt5.QtWidgets import QMessageBox, QFileDialog, QInputDialog\n\n\nclass Init_config():\n\n def __init__(self, mainWindow=None):\n self.mainWindow = mainWindow\n self.ui = Ui_MainWindow()\n self.ui.setupUi(mainWindow)\n self.centralWidget = self.ui.centralWidget\n # List all label we have in the UI.\n self.label_lst = [self.ui.label_1, self.ui.label_2, self.ui.label_3,\n self.ui.label_4, self.ui.label_5, self.ui.label_6]\n # To save the cap_label init name.\n\n # Create the\n self.ui.origin_label_names = [\"None\", \"cap45a1\", \"cap60a1\", \"cap90a1\",\n \"cap45a2\", \"cap60a2\", \"cap90a2\"]\n\n self.cap_label_name = []\n # To save all available Camera name or path.\n self.cap_objects = []\n cam_lst = basicTool().availableCamera()\n self.label_lst = basicTool().availableLabel(lst=self.label_lst, count=len(cam_lst))\n\n\n for cam_name in cam_lst:\n cap = cv2.VideoCapture(cam_name)\n if cap.isOpened():\n self.cap_objects.append(cap)\n else:\n print('%s Camera can not open.' 
% cam_name)\n\n # Append the cap_label object.\n for i in range(len(self.cap_objects)):\n self.cap_label_name.append(\n Camera(capture=self.cap_objects[i], label=self.label_lst[i]))\n\n\n def show(self):\n # Start all available Camera Thread to show in the UI.\n for cap_label in self.cap_label_name:\n cap_label.refresh()\n\n def quit(self):\n for cap_label in self.cap_label_name:\n cap_label.quit()\n cv2.destroyAllWindows()\n for cap in self.cap_objects:\n cap.release()\n\n\n\nclass Init_Cap():\n\n def __init__(self, mainWindow=None, *accept_data):\n self.mainWindow = mainWindow\n self.get_data(accept_data)\n self.ui = Cap_MainWindow()\n self.ui.setupUi(mainWindow)\n self.centralWidget = self.ui.centralWidget\n self.time = 13.00\n self.set_time()\n\n # List all Cap label we have in the UI.\n self.label_lst = [self.ui.label_1, self.ui.label_2, self.ui.label_3,\n self.ui.label_4, self.ui.label_5, self.ui.label_6]\n\n # To save all available Camera name or path.\n self.cap_objects = []\n cam_lst = basicTool().availableCamera()\n self.label_lst = basicTool().availableLabel(lst=self.label_lst, count=len(cam_lst))\n\n for cam_name in cam_lst:\n cap = cv2.VideoCapture(cam_name)\n if cap.isOpened():\n self.cap_objects.append(cap)\n else:\n print('%s Camera can not open.' % cam_name)\n self.cap_label_name = []\n # Append the cap_label object.\n for i in range(len(self.cap_objects)):\n cap_index = int(self.label_name_index[i])\n self.cap_label_name.append(Camera(capture=self.cap_objects[cap_index],\n label=self.label_lst[i],\n label_name=self.label_name[cap_index],\n width=self.cap_width, height=self.cap_height))\n\n self.ui.cap_ok_Button.pressed.connect(self._data)\n self.ui.cap_quit_Button.clicked.connect(self.set_time)\n self.ui.cap_next_Button.clicked.connect(self.next)\n self.ui.toolButton.clicked.connect(self.select_folder)\n self.ui.cap_ok_Button.released.connect(self.start)\n self.ui.lineEdit.editingFinished.connect(self.set_default)\n\n\n def show(self):\n for cap in self.cap_label_name:\n cap.refresh()\n\n def select_folder(self):\n path = os.path.abspath(os.path.dirname(__file__))\n path = QFileDialog.getExistingDirectory(self.centralWidget, path)\n self.ui.lineEdit_5.setText(path)\n\n def set_default(self):\n self.ui.lineEdit_2.setText('0')\n self.ui.lineEdit_4.setText('1')\n\n def next(self):\n direction = int(self.ui.lineEdit_4.text())\n next = direction + 1\n self.ui.lineEdit_4.setText(str(next))\n\n def _data(self):\n self.cvid = self.ui.lineEdit.text()\n self.date = self.ui.lineEdit_3.text()\n self.char = self.ui.lineEdit_2.text()\n self.direction = self.ui.lineEdit_4.text()\n self.save_path = self.ui.lineEdit_5.text().strip()\n if self.save_path[-1] == '/':\n pass\n else:\n self.save_path += '/'\n\n def start(self):\n self.ui.cap_ok_Button.setEnabled(False)\n self.save_img_timer = Save_img_Timer(parent=self, cap_Objects=self.cap_label_name, time=self.time)\n self.save_img_timer.send_msg.connect(self.update_textBrowser)\n self.save_img_timer.start()\n\n\n\n def save_img(self, img, label_name):\n\n save_dir = self.save_path + self.cvid + '/' + label_name + '/' + \\\n self.direction + '/'\n img_name = self.cvid + '_' + self.char + '_' + self.date + '_' + \\\n label_name\n cv2.imwrite()\n\n\n\n def set_time(self):\n reply, ok = QInputDialog.getDouble(self.centralWidget, '拍照周期', '请输入拍照周期(单位 s): ', 13, 0, 999,3)\n if ok:\n self.time = reply\n\n\n def get_data(self, data):\n if len(data):\n self.cap_width = data[0]\n self.cap_height = data[1]\n self.label_name_index = data[2]\n 
self.label_name = data[3]\n\n def update_textBrowser(self, msg):\n self.ui.textBrowser.append(msg)\n\n\n\nclass Handle():\n def __init__(self, mainWindow=QtWidgets.QMainWindow,\n parent=Ui_MainWindow, widget=None, main=None):\n self.mainWindow = mainWindow\n self.parent = parent\n self.widget = widget\n self.main = main\n\n def quit(self):\n reply = QMessageBox.question(self.widget, 'Msg:', '确认退出吗?',\n QMessageBox.Yes | QMessageBox.No,\n QMessageBox.No)\n if reply == QMessageBox.Yes:\n self.mainWindow.close()\n else:\n pass\n\n def start(self):\n self.mainWindow.close()\n width, height, label_name_index, label_name = self.mainwindow_get_data()\n self.init_Cap = Init_Cap(self.mainWindow, width, height,\n label_name_index, label_name)\n self.init_Cap.show()\n self.init_Cap.mainWindow.show()\n\n\n\n\n def mainwindow_get_data(self):\n width = self.parent.lineEdit_width.text()\n height = self.parent.lineEdit_height.text()\n\n label_names = []\n origin_label_names = self.parent.origin_label_names.copy()\n origin_label_names.pop(0)\n comboBox_lst = [self.parent.comboBox_1.currentText(),\n self.parent.comboBox_2.currentText(),\n self.parent.comboBox_3.currentText(),\n self.parent.comboBox_4.currentText(),\n self.parent.comboBox_5.currentText(),\n self.parent.comboBox_6.currentText()]\n # Get the user choose comboBox value except None.\n for each in comboBox_lst:\n if each not in label_names and each != 'None':\n label_names.append(each)\n else:\n pass\n # Get the every Cap Object where it should show.\n label_name_index = []\n for each in label_names:\n label_name_index.append(origin_label_names.index(each))\n\n tmp_lst = []\n for i in range(len(label_name_index)):\n index = 0\n for each in label_name_index:\n if label_name_index[i] > int(each):\n index += 1\n tmp_lst.append(index)\n label_name_index = tmp_lst\n # The label_name_index will order by origin index, finally return\n # such as [1, 2, 0].\n\n return width, height, label_name_index, label_names\n\n\nif __name__ == '__main__':\n dir_path = os.path.join(os.path.dirname(__file__), 'log')\n if not os.path.exists(dir_path):\n os.makedirs(dir_path)\n\n app = QtWidgets.QApplication(sys.argv)\n mainWindow = QtWidgets.QMainWindow()\n\n main = Init_config(mainWindow)\n main.show()\n\n handle = Handle(mainWindow=mainWindow, parent=main.ui,\n widget=main.centralWidget, main=main)\n # Yes button what to do and Cancel button waht to do.\n main.ui.buttonBox.accepted.connect(main.quit)\n main.ui.buttonBox.accepted.connect(handle.start)\n main.ui.buttonBox.rejected.connect(handle.quit)\n\n\n mainWindow.update()\n mainWindow.show()\n\n sys.exit(app.exec_())\n\n" }, { "alpha_fraction": 0.6208716034889221, "alphanum_fraction": 0.6455629467964172, "avg_line_length": 48.87894821166992, "blob_id": "29123c69975b7e65fbdd3eba704ef18083dab8fc", "content_id": "81be56ae0327d37cedcbd702f98616ee7c91e84f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9481, "license_type": "no_license", "max_line_length": 104, "num_lines": 190, "path": "/ui/mainwindow.py", "repo_name": "MikoyChinese/Cap_tool", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'mainwindow.ui'\n#\n# Created by: PyQt5 UI code generator 5.9\n#\n# WARNING! 
All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nimport sys\nfrom ui.component import basicLabel, basicComboBox\n\n\nclass Ui_MainWindow(object):\n def setupUi(self, MainWindow):\n MainWindow.setObjectName(\"MainWindow\")\n # The main window size.\n MainWindow.resize(1280, 800)\n sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())\n MainWindow.setSizePolicy(sizePolicy)\n MainWindow.setMinimumSize(QtCore.QSize(1280, 800))\n MainWindow.setSizeIncrement(QtCore.QSize(10, 10))\n self.centralWidget = QtWidgets.QWidget(MainWindow)\n self.centralWidget.setMinimumSize(QtCore.QSize(1024, 720))\n self.centralWidget.setObjectName(\"centralWidget\")\n self.gridLayout = QtWidgets.QGridLayout(self.centralWidget)\n self.gridLayout.setContentsMargins(9, 0, 11, 11)\n self.gridLayout.setSpacing(6)\n self.gridLayout.setObjectName(\"gridLayout\")\n self.verticalLayout = QtWidgets.QVBoxLayout()\n self.verticalLayout.setContentsMargins(5, 5, 5, 5)\n self.verticalLayout.setSpacing(6)\n self.verticalLayout.setObjectName(\"verticalLayout\")\n self.gridLayout_2 = QtWidgets.QGridLayout()\n self.gridLayout_2.setContentsMargins(5, 5, 5, 5)\n self.gridLayout_2.setHorizontalSpacing(4)\n self.gridLayout_2.setVerticalSpacing(3)\n self.gridLayout_2.setObjectName(\"gridLayout_2\")\n\n self.label_1 = basicLabel(label_name='label_1',\n parent=self.centralWidget)\n self.label_2 = basicLabel(label_name='label_2',\n parent=self.centralWidget)\n self.label_3 = basicLabel(label_name='label_3',\n parent=self.centralWidget)\n self.label_4 = basicLabel(label_name='label_4',\n parent=self.centralWidget)\n self.label_5 = basicLabel(label_name='label_5',\n parent=self.centralWidget)\n self.label_6 = basicLabel(label_name='label_6',\n parent=self.centralWidget)\n\n # The mainWindow labels, it will show the pics.\n self.gridLayout_2.addWidget(self.label_1, 1, 0, 1, 1)\n self.gridLayout_2.addWidget(self.label_2, 1, 1, 1, 1)\n self.gridLayout_2.addWidget(self.label_3, 1, 2, 1, 1)\n self.gridLayout_2.addWidget(self.label_4, 3, 0, 1, 1)\n self.gridLayout_2.addWidget(self.label_5, 3, 1, 1, 1)\n self.gridLayout_2.addWidget(self.label_6, 3, 2, 1, 1)\n\n # Create the default label name, if you want to change it, please\n # change it in the main process <cap_tool.py>.\n self.origin_label_names = [\"None\", \"cap45a1\", \"cap60a1\", \"cap90a1\",\n \"cap45a2\", \"cap60a2\", \"cap90a2\"]\n\n self.comboBox_1 = basicComboBox(object_name='comboBox_1',\n QWidget_parent=self.centralWidget)\n self.comboBox_2 = basicComboBox(object_name='comboBox_2',\n QWidget_parent=self.centralWidget)\n self.comboBox_3 = basicComboBox(object_name='comboBox_3',\n QWidget_parent=self.centralWidget)\n self.comboBox_4 = basicComboBox(object_name='comboBox_4',\n QWidget_parent=self.centralWidget)\n self.comboBox_5 = basicComboBox(object_name='comboBox_5',\n QWidget_parent=self.centralWidget)\n self.comboBox_6 = basicComboBox(object_name='comboBox_6',\n QWidget_parent=self.centralWidget)\n\n self.comboBox_lst = [self.comboBox_1, self.comboBox_2,\n self.comboBox_3, self.comboBox_4,\n self.comboBox_5, self.comboBox_6]\n for comboBox in self.comboBox_lst:\n comboBox.addItems(self.origin_label_names)\n\n\n self.gridLayout_2.addWidget(self.comboBox_1, 0, 0, 1, 1,\n QtCore.Qt.AlignHCenter)\n self.gridLayout_2.addWidget(self.comboBox_2, 0, 1, 1, 1,\n 
QtCore.Qt.AlignHCenter)\n self.gridLayout_2.addWidget(self.comboBox_3, 0, 2, 1, 1,\n QtCore.Qt.AlignHCenter)\n self.gridLayout_2.addWidget(self.comboBox_4, 2, 0, 1, 1,\n QtCore.Qt.AlignHCenter)\n self.gridLayout_2.addWidget(self.comboBox_5, 2, 1, 1, 1,\n QtCore.Qt.AlignHCenter)\n self.gridLayout_2.addWidget(self.comboBox_6, 2, 2, 1, 1,\n QtCore.Qt.AlignHCenter)\n\n self.verticalLayout.addLayout(self.gridLayout_2)\n self.gridLayout.addLayout(self.verticalLayout, 0, 0, 1, 1)\n self.horizontalLayout = QtWidgets.QHBoxLayout()\n self.horizontalLayout.setContentsMargins(11, 11, 11, 11)\n self.horizontalLayout.setSpacing(5)\n self.horizontalLayout.setObjectName(\"horizontalLayout\")\n\n self.label_width = QtWidgets.QLabel(self.centralWidget)\n self.label_width.setMinimumSize(QtCore.QSize(96, 32))\n self.label_width.setAlignment(QtCore.Qt.AlignCenter)\n self.label_width.setObjectName(\"label_width\")\n self.label_width.setText('Width[宽]:')\n self.lineEdit_width = QtWidgets.QLineEdit(self.centralWidget)\n self.lineEdit_width.setMinimumSize(QtCore.QSize(96, 32))\n self.lineEdit_width.setText('800')\n self.lineEdit_width.setObjectName(\"lineEdit_width\")\n\n self.horizontalLayout.addWidget(self.label_width)\n self.horizontalLayout.addWidget(self.lineEdit_width)\n\n self.label_height = QtWidgets.QLabel(self.centralWidget)\n self.label_height.setMinimumSize(QtCore.QSize(96, 32))\n self.label_height.setAlignment(QtCore.Qt.AlignCenter)\n self.label_height.setObjectName(\"label_height\")\n self.label_height.setText('Height[高]:')\n self.lineEdit_height = QtWidgets.QLineEdit(self.centralWidget)\n self.lineEdit_height.setMinimumSize(QtCore.QSize(96, 32))\n self.lineEdit_height.setText('600')\n self.lineEdit_height.setObjectName(\"lineEdit_height\")\n\n self.horizontalLayout.addWidget(self.label_height)\n self.horizontalLayout.addWidget(self.lineEdit_height)\n\n # MainWindow button\n self.buttonBox = QtWidgets.QDialogButtonBox(self.centralWidget)\n # self.buttonBox.setMinimumSize(640, 32)\n self.buttonBox.setStandardButtons(\n QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok)\n self.buttonBox.setCenterButtons(True)\n self.buttonBox.setObjectName(\"buttonBox\")\n self.horizontalLayout.addWidget(self.buttonBox)\n\n self.gridLayout.addLayout(self.horizontalLayout, 1, 0, 1, 1)\n self.gridLayout.setRowStretch(0, 16)\n self.gridLayout.setRowStretch(1, 1)\n MainWindow.setCentralWidget(self.centralWidget)\n self.menuBar = QtWidgets.QMenuBar(MainWindow)\n self.menuBar.setGeometry(QtCore.QRect(0, 0, 1280, 31))\n self.menuBar.setObjectName(\"menuBar\")\n self.menuCap_Tool = QtWidgets.QMenu(self.menuBar)\n self.menuCap_Tool.setObjectName(\"menuCap_Tool\")\n self.menuInfo = QtWidgets.QMenu(self.menuCap_Tool)\n self.menuInfo.setObjectName(\"menuInfo\")\n MainWindow.setMenuBar(self.menuBar)\n self.action_Author_Mikoy = QtWidgets.QAction(MainWindow)\n self.action_Author_Mikoy.setObjectName(\"action_Author_Mikoy\")\n self.actionEmail_mikoychinese_gmail_com = QtWidgets.QAction(MainWindow)\n self.actionEmail_mikoychinese_gmail_com.setObjectName(\"actionEmail_mikoychinese_gmail_com\")\n self.actionHelp = QtWidgets.QAction(MainWindow)\n self.actionHelp.setCheckable(False)\n self.actionHelp.setObjectName(\"actionHelp\")\n self.menuInfo.addAction(self.action_Author_Mikoy)\n self.menuInfo.addAction(self.actionEmail_mikoychinese_gmail_com)\n self.menuCap_Tool.addAction(self.menuInfo.menuAction())\n self.menuCap_Tool.addAction(self.actionHelp)\n self.menuBar.addAction(self.menuCap_Tool.menuAction())\n\n 
self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate(\"Cap_configurate\", \"Cap_configurate\"))\n\n self.menuCap_Tool.setTitle(_translate(\"MainWindow\", \"Cap Tool\"))\n self.menuInfo.setTitle(_translate(\"MainWindow\", \"Info\"))\n self.action_Author_Mikoy.setText(_translate(\"MainWindow\", \"__Author__: Mikoy\"))\n self.actionEmail_mikoychinese_gmail_com.setText(_translate(\"MainWindow\", \"Email: [email protected]\"))\n self.actionHelp.setText(_translate(\"MainWindow\", \"Help\"))\n\n\nif __name__ == '__main__':\n\n app = QtWidgets.QApplication(sys.argv)\n mainWindow = QtWidgets.QMainWindow()\n ui = Ui_MainWindow()\n ui.setupUi(mainWindow)\n mainWindow.show()\n sys.exit(app.exec_())\n" } ]
5
fisheye36/macsearch
https://github.com/fisheye36/macsearch
5e2be7394bf66d95ec6449243f132e78da45298b
d6b771add12eeafe10ea1888301442a07024801a
56b26817b42110076db15781569b813417764edb
refs/heads/main
2023-04-11T03:01:16.312970
2021-04-18T22:02:57
2021-04-18T22:02:57
359,181,271
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6600877046585083, "alphanum_fraction": 0.6618421077728271, "avg_line_length": 29.399999618530273, "blob_id": "9e2d5a9c6658f6e3bc15e84b970cc1b127517540", "content_id": "6b0e842b579ffa4bbd1082807dd298f03c5b6003", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2280, "license_type": "no_license", "max_line_length": 120, "num_lines": 75, "path": "/macsearch/main.py", "repo_name": "fisheye36/macsearch", "src_encoding": "UTF-8", "text": "import argparse\nimport sys\nfrom os import getenv\nfrom typing import Any, NoReturn\n\nimport requests\n\n\nAPI_KEY_ENV_VAR_NAME = 'MACSEARCH_API_KEY'\nAPI_KEY_FROM_ENV = getenv(API_KEY_ENV_VAR_NAME)\n\nAPI_URL = 'https://api.macaddress.io/v1'\nREQUEST_TIMEOUT = 1.0\n\n\ndef main() -> None:\n args = _parse_args()\n if args.api_key is None:\n _print_error_and_exit('Missing API key')\n\n try:\n response = query_api(api_key=args.api_key, mac_address=args.mac)\n except requests.RequestException as e:\n _print_error_and_exit(f'Error while doing the request: {e}')\n\n try:\n response_json = response.json()\n except ValueError:\n _print_error_and_exit('Error while parsing JSON response')\n\n try:\n _print_device_manufacturer(response_json, response.status_code, mac_address=args.mac)\n except KeyError as e:\n _print_error_and_exit(f'Expected response JSON key not found: {e}')\n\n\ndef _parse_args() -> argparse.Namespace:\n parser = argparse.ArgumentParser(prog='macsearch', description='Search network device manufacturer by MAC address.')\n parser.add_argument('mac', help='MAC address')\n parser.add_argument('--api-key', help=f'API key to use, supersedes environment variable {API_KEY_ENV_VAR_NAME}')\n\n args = parser.parse_args()\n if args.api_key is None:\n args.api_key = API_KEY_FROM_ENV\n\n return args\n\n\ndef _print_error_and_exit(error: Any) -> NoReturn:\n print(error, file=sys.stderr)\n exit(1)\n\n\ndef query_api(api_key: str, mac_address: str) -> requests.Response:\n api_params = {\n 'apiKey': api_key,\n 'output': 'json',\n 'search': mac_address,\n }\n response = requests.get(API_URL, params=api_params, timeout=REQUEST_TIMEOUT)\n return response\n\n\ndef _print_device_manufacturer(response_json: dict[str, Any], response_status_code: int, mac_address: str) -> None:\n if response_status_code == requests.codes.ok:\n manufacturer_name = response_json['vendorDetails']['companyName']\n print(f'Company that manufactured the device with MAC address {mac_address} is:\\n'\n f'{manufacturer_name}')\n else:\n error_message = response_json['error']\n _print_error_and_exit(f'API error: {error_message}')\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.6216216087341309, "alphanum_fraction": 0.6381381154060364, "avg_line_length": 23.66666603088379, "blob_id": "a92adc92cd9e4fb79ee456f22f2d06b71d3fd64a", "content_id": "75483a64d171feeb2f9621ac8b987e5c4b896c51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 666, "license_type": "no_license", "max_line_length": 68, "num_lines": 27, "path": "/setup.py", "repo_name": "fisheye36/macsearch", "src_encoding": "UTF-8", "text": "from pathlib import Path\n\nfrom setuptools import find_packages, setup\n\n\nhere = Path(__file__).parent.resolve()\nreadme = (here / 'README.md').read_text()\n\nsetup(\n name='macsearch',\n version='1.0.0',\n description='Search network device manufacturer by MAC address',\n long_description=readme,\n long_description_content_type='text/markdown',\n 
author='Kamil Warchoł',\n author_email='[email protected]',\n url='https://github.com/fisheye36/macsearch',\n packages=find_packages(),\n install_requires=[\n 'requests==2.25.1',\n ],\n entry_points='''\n [console_scripts]\n macsearch=macsearch.main:main\n ''',\n python_requires='>= 3.6',\n)\n" }, { "alpha_fraction": 0.7374100685119629, "alphanum_fraction": 0.7478744387626648, "avg_line_length": 29.579999923706055, "blob_id": "1f226755085b2524d9357f13d686ad1ab4017740", "content_id": "80566c629e154bf5f54cebe06a26a255c6f2e474", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3058, "license_type": "no_license", "max_line_length": 119, "num_lines": 100, "path": "/README.md", "repo_name": "fisheye36/macsearch", "src_encoding": "UTF-8", "text": "# Description\n\n`macsearch` is a simple tool that returns manufacturer name of a network device, given its MAC address.\n\nIt uses web API provided by https://macaddress.io/.\n\n# Usage\n\n## Standard installation\n\nUsing Python, preferably in a virtual environment:\n\n```shell\n$ python -m macsearch 44:38:39:ff:ef:57 --api-key \"${MACSEARCH_API_KEY}\"\nCompany that manufactured the device with MAC address 44:38:39:ff:ef:57 is:\nCumulus Networks, Inc\n```\n\nActually, when environment variable `MACSEARCH_API_KEY` is set, you don't need to provide it explicitly. On the other\nhand, when provided, it will supersede the value set by that environment variable.\n\n### Help\n\nTo see parameters and options available when running the tool:\n\n```shell\n$ python -m macsearch --help\nusage: macsearch [-h] [--api-key API_KEY] mac\n\nSearch network device manufacturer by MAC address.\n\npositional arguments:\n mac MAC address\n\noptional arguments:\n -h, --help show this help message and exit\n --api-key API_KEY API key to use, supersedes environment variable MACSEARCH_API_KEY\n```\n\n## `setup.py` installation\n\nWhen installed using `setup.py` script, `macsearch` command is accessible in your `PATH`:\n\n```shell\nmacsearch 44:38:39:ff:ef:57 --api-key \"${MACSEARCH_API_KEY}\"\n```\n\n## Using Docker\n\nAfter building Docker image, use it like that:\n\n```shell\ndocker run --env MACSEARCH_API_KEY=\"${MACSEARCH_API_KEY}\" macsearch-container macsearch 44:38:39:ff:ef:57\n```\n\n# Installation\n\n## Local installation\n\nAfter cloning this repository and ensuring you have your virtual environment set up, install all dependencies:\n\n```shell\npip install -U pip setuptools\npip install -r requirements.txt\n```\n\nYou can run the tool now using either `python -m macsearch` or `python macsearch/main.py`.\n\nAlternatively and more conveniently you can leverage `setup.py` script:\n\n```shell\npip install -U pip setuptools\npip install . # optionally add -e to be able to directly edit source code and see changes immediately\n```\n\nThis will allow you to directly use `macsearch`, assuming you have your virtual environment active.\n\n## Using Docker\n\nFirst, you need to build your image. If you want to embed the API key into the image, you can use `--build-arg` to set\nenvironment variable that will be available when container is run:\n\n```shell\ndocker build -t macsearch-container --build-arg MACSEARCH_API_KEY_ARG=\"${MACSEARCH_API_KEY}\" .\n```\n\nBe careful with that approach because anyone with access to this image could potentially see your API key. 
To be more\nsecure, build the image without API key embedded in it:\n\n```shell\ndocker build -t macsearch-container .\n```\n\nKeep in mind that you will have to pass it every time you want to run the container, either by using `--api-key` option\nthat is exposed by `macsearch` tool itself, or `--env MACSEARCH_API_KEY=\"${MACSEARCH_API_KEY}\"` exposed by Docker.\n\n# Security considerations\n\nApart from the fact that you should protect your API key, the tool itself should not pose any security threats, since\nall it does is a simple HTTPS GET request.\n" }, { "alpha_fraction": 0.7659279704093933, "alphanum_fraction": 0.7797784209251404, "avg_line_length": 31.81818199157715, "blob_id": "fae4f4827312677b2e57cd2ab88c5e94c6adb1c4", "content_id": "e38cf54d1cb5287ad29f5707e78763a54611e082", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 722, "license_type": "no_license", "max_line_length": 109, "num_lines": 22, "path": "/Dockerfile", "repo_name": "fisheye36/macsearch", "src_encoding": "UTF-8", "text": "FROM python:3.9\n\nRUN mkdir /application\nWORKDIR /application\n\nRUN pip install --upgrade pip setuptools\n\n# install dependencies\nCOPY requirements.txt /application\nRUN pip install -r requirements.txt\n\n# install application so that it can be used directly, without python -m <module_name>, or python <script.py>\nCOPY . /application\nRUN pip install .\n\n# keep in mind that anyone with access to the built image can see the API key\n# consider supplying environment variable when running the container, not during building\nARG MACSEARCH_API_KEY_ARG\nENV MACSEARCH_API_KEY=$MACSEARCH_API_KEY_ARG\n\n# use docker run <container-name> macsearch <other-mac-address> to query different MAC address\nCMD [\"macsearch\", \"44:38:39:ff:ef:57\"]\n" } ]
4
pradeeppanayal/pat
https://github.com/pradeeppanayal/pat
fe4c79a7b6e5139d899415d5c7a7b97ba77fd8c7
98d5a9a955fad673cce2bcfe0e5166b0b7ff8bb5
10013c3dd136f931a0f4c72d0bdd433da6c2040a
refs/heads/master
2020-12-02T17:38:04.568770
2017-07-21T11:33:27
2017-07-21T11:33:27
96,403,078
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5990239977836609, "alphanum_fraction": 0.6010573506355286, "avg_line_length": 28.2261905670166, "blob_id": "796167518664ffab66aa4ac442af25a82fa1e1a0", "content_id": "8895f128e8953e07089f17c6f65e749ad6d68733", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2459, "license_type": "no_license", "max_line_length": 137, "num_lines": 84, "path": "/cgi/cgi/v2/Lib/beans/beans.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python \n__author__ ='Pradeep'\n\n\n\n\nclass ServerInfo(object):\n def __init__(self):\n self.id = ''\n self.ip = ''\n self.username = ''\n self.password = ''\n self.identifier = ''\n self.status = 'Not checked'\n\n def getAttributes(self):\n return {'id':self.id,'ip':self.ip,'username':self.username,'password':self.password,'identifier':self.identifier,'status':self.status}\n\n def setAttributes(self,a):\n self.id = a['id']\n self.ip =a['ip']\n self.username = a['username']\n self.password = a['password']\n self.identifier = a['identifier']\n self.status = a['status']\n\nclass EnvInfo(ServerInfo):\n def __init__(self):\n super(EnvInfo,self).__init__()\n self.bootstrap = ''\n self.description = ''\n self.team = ''\n self.phase = ''\n\n def getAttributes(self):\n attr = super(EnvInfo,self).getAttributes()\n attr['bootstrap']= self.bootstrap\n attr['description']= self.description\n attr['team']= self.team\n attr['phase']= self.phase\n return attr\n\n def setAttributes(self,a):\n super(EnvInfo,self).setAttributes(a) \n self.bootstrap = a['bootstrap']\n self.description = a['description'] \n self.team = a['team'] \n self.phase = a['phase'] \n\nclass Hypervisor(ServerInfo):\n def __init__(self):\n super(Hypervisor,self).__init__()\n self.type = ''\n def getAttributes(self):\n attr = super(Hypervisor,self).getAttributes()\n attr['type']= self.type\n return attr\n\n def setAttributes(self,a):\n super(Hypervisor,self).setAttributes(a) \n self.type = a['type'] \n\nclass DHCPServer(ServerInfo):\n def __init__(self):\n super(DHCPServer,self).__init__()\n self.configStatus = 'Absolute'\n self.configAvailable ='Not found'\n self.configmd5 = ''\n self.configSynchUp='No info'\n\n def getAttributes(self):\n attr = super(DHCPServer,self).getAttributes()\n attr['configStatus']= self.configStatus\n attr['configAvailable']= self.configAvailable\n attr['configmd5']= self.configmd5\n attr['configSynchUp'] = self.configSynchUp\n return attr\n\n def setAttributes(self,a):\n super(DHCPServer,self).setAttributes(a) \n self.configStatus = a['configStatus'] \n self.configmd5 = a['configmd5'] \n self.configAvailable = a['configAvailable'] \n self.configSynchUp = a['configSynchUp']\n\n\n \n" }, { "alpha_fraction": 0.6620357036590576, "alphanum_fraction": 0.6691238880157471, "avg_line_length": 26.77165412902832, "blob_id": "fc9aa3781623bbe617ca52270dd3dc922c2f26cd", "content_id": "b7b80307e05f45a6a481223b53d2f426bc236f89", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3527, "license_type": "no_license", "max_line_length": 149, "num_lines": 127, "path": "/cgi/cgi/v2/deviceaction/Reload.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "# start by importing the library\n\n\nimport sys\nsys.path.append('../Lib')\n\nfrom commandexecutor import validateAuthentication\nfrom commandexecutor import executeCommand\n\nimport pyeapi \nimport re\n\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n\n'''\nThis class contains 
list methods that allows to restart devices\n'''\nclass ReloadDevices(object):\n\tdef realoadDevice(self,ip,userName='cvpuser',pwd='root'):\n\t\t#self.log('Trying to Reset %s' %(ip)) \n\t\ttry:\n\t\t\t#self.log('Trying to connect to device')\n\t\t\tvalidateAuthentication(ip,userName,pwd)\n\t\texcept Exception as e:\n\t\t\t#self.log( 'Authetication failed' )\n\t\t\treturn 'Authentication failure'\n\t\t\n\t\ttry:\n\t\t\t#self.log('Deleteting startup config')\n\t\t\texecuteCommand(ip,['enable','delete flash:startup-config'],userName,pwd)\n\t\t\t#self.log( 'Startup config deleted')\n\t\texcept Exception as inst:\n\t\t\t#self.log( 'Satrtup config not deleted: '+str(inst) ) \n\t\t\tpass\n\t\ttry:\n\t\t\t#self.log( 'Deleteting zerotouch-config')\n\t\t\texecuteCommand(ip,['enable','delete flash:zerotouch-config'],userName,pwd)\n\t\t\t#self.log( 'zerotouch-config deleted' )\n\t\texcept Exception as inst:\n\t\t\t#self.log( 'Satrtup config not deleted: '+str(inst)) \n\t\t\tpass\n\t\ttry:\n\t\t\t#self.log( 'realoding device')\n\t\t\texecuteCommand(ip,['enable','reload now'],userName,pwd)\n\t\t\t#self.log( 'Device Reset triggered')\n\t\texcept Exception as inst:\n\t\t\t#self.log('Device restart triggered : '+str(inst))\n\t\t\tpass\n\n\t\t#self.log('%s Reset request completed with status %d' %(ip, status),True)\n\t\treturn 'Device Reset Process Initated..'\n\n\tdef writeToFile(self,msg):\n\t\twith open('result','a') as f:\n\t\t\tf.write('\\n' + msg)\t\n\tdef reloadDevicesFromFile(self,path):\n\t\tdata =''\n\n\t\tsplitRegEx = '[\\s\\n,]';\n\t\tipRegEx = '^(?:[0-9]{1,3}\\.){3}[0-9]{1,3}$'\n\n\t\twith open(path,'r') as f:\t\n\t\t\tdata = f.read();\n\n\t\tips = re.split(splitRegEx,data)\n\t\t#self.log('Extracted IPs :'+ str(ips))\n\t\t\n\t\tself.log('Enetr the credential \\n')\n\t\tenvUsername = self.readInput('Username :')\n\t\tenvPassword = self.readInput('Password :')\n \n\t\tfor ip in ips:\n\n\t\t\tif re.match(ipRegEx,ip) ==None :\n\t\t\t\tself.log('Invalid IP. IP %s skipped' %str(ip))\n\t\t\t\tcontinue\n\n\t\t\tif self.realoadDevice(ip,envUsername,envPassword) != 1:\n\t\t\t\tself.log('Reset failed')\t\n\t \n\tdef getEnvPassword(self):\n\t\tuname = self.readInput('Enter the enviornment details. This will be used if the default username and password fails to authenticate \\nUsername :');\n\t\tpwd = self.readInput('Password :');\n\t\treturn [uname,pwd]\n\n\tdef readInput(self,msg):\n\t\treturn raw_input(msg)\n\n\tdef log(self,msg,writeToFile=False):\n\t\tprint msg+'<br/>'\n\t\tif writeToFile:\n\t\t\tself.writeToFile(msg)\n\ndef main():\n\tdeviceLoader = ReloadDevices()\n\tch = 0\n\twhile ch !=4:\n\n\t\tmenu = '1. Reset a single device'\n\t\tmenu = menu + '\\n2. Reset devices from a file'\n\t\tmenu = menu + '\\n3. Reset devices from DHCP config'\n\t\tmenu = menu + '\\n4. 
Exit'\n\t\tmenu = menu + '\\nEnter your choice [1-4]:'\n\n\t\tch = int(deviceLoader.readInput(menu))\n\n\t\tif ch == 1:\n\t\t\tip = deviceLoader.readInput('Enter IP :')\n\t\t\tif deviceLoader.realoadDevice(ip)!= 1:\n\t\t\t\tretry = 'Reset failed try with another credentials (y/n):'\n\t\t\t\twhile deviceLoader.readInput(retry) == 'y':\n\t\t\t\t\t#get the input\n\t\t\t\t\tuname = deviceLoader.readInput('Username :')\n\t\t\t\t\tpwd = deviceLoader.readInput('Password :')\n\t\t\t\t\t\n\t\t\t\t\tif deviceLoader.realoadDevice(ip,uname,pwd)== 1:\n\t\t\t\t\t\tbreak;\n\t\t\t\t\telse:\n\t\t\t\t\t\tdeviceLoader.log('Reset failed.')\n\t\telif ch==2:\n\t\t\tfileName = deviceLoader.readInput('Enter the file path :')\n\t\t\tdeviceLoader.reloadDevicesFromFile(fileName)\n\t\t \n\t\t\t\nif __name__ == \"__main__\":\n\tmain()\n" }, { "alpha_fraction": 0.6428571343421936, "alphanum_fraction": 0.648809552192688, "avg_line_length": 22.85714340209961, "blob_id": "81c99c71008cb2ae04407d37c62458c196ea1b24", "content_id": "30d3ed3bafc9cee033b9dadf7f1c8d5724f74b0f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 168, "license_type": "no_license", "max_line_length": 68, "num_lines": 7, "path": "/cgi/cgi/v2/Lib/usermanager.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n\n\n\nusers = {'pradeep':'myword','sangeeth':'sangu','shibi':'cheera'}\ndef authenticate(un,pwd):\n return un in users.keys() and users[un] == pwd\n\n" }, { "alpha_fraction": 0.5689757466316223, "alphanum_fraction": 0.5820012092590332, "avg_line_length": 24.208955764770508, "blob_id": "86d1bb61f6dbf3cf642b34723a74d6d68bab6315", "content_id": "b000f451e1060d2ab2249582ae0bdbdfac589d99", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1689, "license_type": "permissive", "max_line_length": 63, "num_lines": 67, "path": "/v2/js/scripter/upload.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 19-Jun-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n \n\n//Actions \n\n$(document).on('change','#uploadScript',function(){\n var files= $('#uploadScript')[0].files\n if(files.length==0){\n $('#fileLabel').val('Select a script file');\n return\n }\n $('#fileLabel').val(files[0].name);\n});\n\n$(document).on('click', '#submit', function (event) { \n event.preventDefault(); \n\t\n loadResp('Uploading script. 
Please wait...')\n event.preventDefault();\n var files= $('#uploadScript')[0].files\n if(files.length==0){\n loadResp('No script selected')\n return\n }\n var file = files[0]\n var data = new FormData();\n data.append('script', file)\n data.append('action', 'upload')\n data.append('scriptname', $('#scriptname').val())\n data.append('username', $('#username').val())\n data.append('password', $('#password').val()) \n data.append('scriptType', $('#scriptType').val()) \n data.append('param', $('#param').val()) \n target = cgiPath +'scripter/scriptmanager.py'\n $.triggerPOSTCallWithoutContentType(target,data,loaActResp);\n});\n\n\n$(document).on('click', '#submit', function (event) { \n event.preventDefault(); \n $('#scriptname').val('')\n $('#username').val('')\n $('#password').val('')\n $('#scriptType').val('')\n $('#param').val('')\n $('#uploadScript').val('')\n $('#fileLabel').val('');\n});\nfunction loaActResp(resp){\n if(resp===undefined){\n loadResp('Invalid resp')\n return\n }\n var obj = jQuery.parseJSON( resp );\n if(obj['status']==='success'){\n $('#reset').click();\n } \n loadResp(obj['data']) \n}\nfunction loadResp(resp){\n\t$('#consoleContent').html(resp).fadeIn(1000);\n}\n" }, { "alpha_fraction": 0.6724137663841248, "alphanum_fraction": 0.6781609058380127, "avg_line_length": 25.615385055541992, "blob_id": "7994fd0df92cb8d081463c0290238b7df245b71d", "content_id": "b47518e000a9ad593ddbb99fcaa048cb91688772", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 348, "license_type": "no_license", "max_line_length": 70, "num_lines": 13, "path": "/cgi/cgi/v2/DeviceManager/Hypervisor/hypercommons.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\nimport sys\nsys.path.append('../../Lib')\n\nfrom beans import Hypervisor\nfrom sqldb import db\n\ndef getServerInfoById(uid): \n s = Hypervisor()\n #def getData(self,tablename,keyset,key=None,keyidentifier = ''):\n data = db.getData('Hypervisors',s.getAttributes().keys(),uid,'id')\n assert len(data)==1,'No info'\n data = data[0]\n return data \n" }, { "alpha_fraction": 0.6145004630088806, "alphanum_fraction": 0.6304155588150024, "avg_line_length": 27.9743595123291, "blob_id": "d6dbe34ee328ef17b58de27ca7507025626ebec7", "content_id": "28763e24d8ca1baac6b3722841b5780b70a22b89", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1131, "license_type": "permissive", "max_line_length": 108, "num_lines": 39, "path": "/v2/js/DeviceManager/authenticate.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n/*\nAuthor : Pradeep CH\nDate : 10-Feb-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\ncheckParam();\n\nfunction checkParam(){\n\tfromIP = getParamValue(document.location.href,'fromIP') \n\ttoIP = getParamValue(document.location.href,'toIP') \n\tif(fromIP===undefined || toIP===undefined || toIP==\"\" || fromIP==\"\"){\n\t\treturn\n\t} \n\t//if it's va;lid perform click\n\t$('#ValAuthips').val(fromIP+'-'+toIP) \n\t//$('ValAuthsubmit').trigger( \"click\" );\n}\n\n$(document).on('click', '#ValAuthsubmit', function (event) { \n\t$('#ValAuthconsoleResult').html(\"Authentication in progress. 
Please wait...\").fadeIn(800);\t\t\t\n\tkeyword =$('#keyword').val()\n\tforce = $('#force').prop('checked')\n\ttarget = cgiPath + 'deviceaction/authenticationValidator.py'\n\tmethod ='POST'\n\tdata = {'username':$('#ValAuthusername').val(),'pwd':$('#ValAuthpwd').val(),'ips':$('#ValAuthips').val()} \n\t$.triggerCall(target,method,data,loadResp)\t\t\t \n});\n\n$(document).on('click', '#ValAuthReset', function (event) { \n\t$('#ValAuthusername').val(\"\")\n\t$('#ValAuthpwd').val(\"\")\n\t$('#ValAuthips').val(\"\")\n});\n\nfunction loadResp(resp){\n\t$('#ValAuthconsoleResult').html(resp).fadeIn(800); \n}\n" }, { "alpha_fraction": 0.6764488220214844, "alphanum_fraction": 0.6803945899009705, "avg_line_length": 30.395349502563477, "blob_id": "21dc4832bb6ee64da605f1dcc4d048375de9dd6f", "content_id": "24914d853ce2e0f3077b56adf83259fe155ec673", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4055, "license_type": "no_license", "max_line_length": 347, "num_lines": 129, "path": "/cgi/cgi/v2/DeviceManager/DHCP/configManager.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n \nimport sys\nimport os\nfrom os.path import expanduser \nimport re\nimport json\nimport datetime\n\nsys.path.append('../../Lib')\n\nfrom commonutil import getRandomId \nfrom commonutil import getMD5 \nfrom beans import DHCPServer\nfrom sqldb import db\nfrom ParamikkoUtil import copyRemoteFile\n\nfrom dhcpconfig import DHCPDConfigParser\n\nimport cgi, cgitb \n\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n \n\nremotepath = \"/etc/dhcp/dhcpd.conf\"\nsourcedirectory = expanduser(\"~\") + \"/pat/\"\nconfigdirectory = sourcedirectory +'dhcpconfig/'\ntargetName = 'DHCPServers'\n\ndef updateServer(s,key,keyidentifier): \n db.updateEntry(s,targetName,key,'id') \n\ndef getAllEnvs(): \n return db.getData('EnvInfo',['bootstrap','identifier','id']) \n\ndef getServer(uid): \n s = DHCPServer()\n #def getData(self,tablename,keyset,key=None,keyidentifier = ''):\n data = db.getData(targetName,s.getAttributes().keys(),uid,'id')\n assert len(data)==1,'No info'\n data = data[0]\n return data\n \ndef downloadConfig(ip,un,pwd,md5):\n localPath = configdirectory + ip+'.conf'\n copyRemoteFile(ip,un,pwd,remotepath,localPath) \n return getMD5(localPath)\n\ndef deleteAllRecords(ip):\n #db.performAction(self,target,action,condition):\n action ='delete'\n condition = 'serverIp like \"%s\"' %ip\n db.performAction('DHCPSubnets',action,condition)\n db.performAction('DHCPPools',action,condition)\n db.performAction('DHCPHosts',action,condition) \n\ndef parseConfig(ip):\n localPath = configdirectory + ip+'.conf'\n assert os.path.exists(localPath),'Requested config file is not available.'\n with open(localPath,'r') as f:\n data = f.read()\n assert data,'Invalid data recived from server'\n config = DHCPDConfigParser.parse(data)\n return config\n\ndef getStatusAndEnvByBootStrap(envs,bootstrap): \n if not bootstrap or bootstrap=='' or envs==None:\n return ['Not assigned','']\n for env in envs:\n if env['bootstrap']== bootstrap:\n return ['Assigned',env['id']]\n return ['Assigned','Env info not found']\n\ndef loadToDb(config,ip):\n envs =getAllEnvs()\n for subnet in config.subnets:\n subnetID= 'subnet_'+subnet.subnet+'_'+subnet.netmask\n row = {'id':subnetID,'serverIp':ip,'subnet':subnet.subnet,'netmask':subnet.netmask}\n db.addEntry(row,'DHCPSubnets')\n for pool in subnet.pools:\n poolId= getRandomId()\n [status,env] = 
getStatusAndEnvByBootStrap(envs,pool.bootfileName)\n poolrow = {'id':poolId,'subnetId':subnetID,'serverIp':ip,'assignedDate':'No info','rangeStart':pool.rangeStart,'rangeEnd':pool.rangeEnd,'bootfileName':pool.bootfileName, 'subnetMask':pool.subnetMask,'assignEndDate':'No info','routers':pool.routers,'status':status,'assignedEnv':env,'devicecount':len(pool.hosts),'hypervisor':'Not Mapped'}\n db.addEntry(poolrow,'DHCPPools')\n for host in pool.hosts:\n hostrow = {'mac':host.mac,'name':host.name,'ip':host.ip,'poolId':poolId,'serverIp':ip}\n db.addEntry(hostrow,'DHCPHosts')\n\ndef main():\n\tform = cgi.FieldStorage() \n\tact = form.getvalue('action') \n\tuid = form.getvalue('uid')\n\tassert uid, 'Invalid Param'\n\ts= getServer(uid)\n\tassert s,'Server info not found'\n\n\tip = s['ip']\n\tun = s['username']\n\tpwd = s['password'] \n\t\n\tassert ip and un and pwd, 'Invalid information recieved' \n\n\tif act == 'synch':\n\t\tmd5 =None\n\t\tmd5 = downloadConfig(ip,un,pwd,s['configmd5'])\n\t\tif md5!= s['configmd5']:\n\t\t\tdeleteAllRecords(ip)\n\t\t\tconfig = parseConfig(ip)\n\t\t\tloadToDb(config,ip)\n\t\t\tassert config,'Config file cannot be parced'\n\t\ts['configmd5'] = str(md5)\n\t\ts['configAvailable'] ='Avaialble'\n\t\ts['configStatus'] ='Up to date'\n\t\ts['configSynchUp'] = str(datetime.datetime.now().date())\n\t\tupdateServer(s,uid,'key') \n\t\tresp = {'status':'success','data':'Synch completed'} \n\telse:\n\t\tresp = {'status':'error','data':'Invalid action'} \n\t\n\tresp = json.dumps(resp)\t\t\n\tprint resp\n\nif __name__ == \"__main__\":\n print 'Content-type:text/html\\r\\n'\n try:\n main()\n except Exception as e :\n print json.dumps({'status':'error','data':'%s' %str(e)})\n \n\n\n" }, { "alpha_fraction": 0.6458067297935486, "alphanum_fraction": 0.6458067297935486, "avg_line_length": 20.550458908081055, "blob_id": "af1a520b16c99326309c9cada9ff06431ad438c0", "content_id": "3b932446313d94f65a63a59f01d189298fc1870e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2349, "license_type": "no_license", "max_line_length": 90, "num_lines": 109, "path": "/cgi/cgi/v2/command/specialCommand.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n \nimport sys\nsys.path.append('../Lib')\n\nfrom commandexecutor import executeCommand\n\n\n# Import modules for CGI handling \nimport cgi, cgitb \nimport re\nimport json\n\ndef log(msg,formatSpace=True):\n\tif formatSpace:\n\t\tmsg= str(msg).replace(' ','&nbsp;')\n\tprint msg+'<br/>'\n\ndef getTheFieldValue(keys,resp): \n\tjsonObj = None\n\ttry:\n\t\tjsonObj = json.loads(resp)\n\texcept Exception as e :\n\t\treturn 'Invalid resp from device. Cause : %s. 
Response : %s' %(str(e),str(resp))\n\tjsonObj = jsonObj[\"result\"]\n \n\t\n\tfor item in jsonObj:\n\t\ttry:\n\t\t\tfor key in keys:\n\t\t\t\tif key not in item.keys():\n\t\t\t\t\traise Exception ('Key %s not found' %key )\n\t\t\t\titem = item[key]\n\t\t\t\tresp = item\n\t\texcept Exception as e:\n\t\t\tresp ='Exception : %s' %(str(e))\n\tif not resp:\n\t\tresp =str(jsonObj)\t\n\treturn resp\n\t\n# Create instance of FieldStorage \nform = cgi.FieldStorage() \n\n#header\nprint \"Content-type:text/html\\r\\n\\r\\n\"\nprint \"<html>\"\nprint \"<head>\"\nprint \"<title>Hello - Second CGI Program</title>\"\nprint \"</head>\"\nprint \"<body>\"\nprint \"<a href='/pat/command/specialcommand.htm' style='color:blue'>Go back</a></br></br>\"\n\n\n# Get mode from fields \n \nips = form.getvalue('ips') \nun = form.getvalue('uname') \npwd = form.getvalue('password') \ncmd = form.getvalue('cmd') \nfilterDb = form.getvalue('filter') \njsonKeys=None\n\nif filterDb:\n\tjsonKeys = filterDb.split(' ')\n\ncmds=[]\n \nif not ips:\n\tlog('IP address required')\nelif not cmd:\n\tlog('There is no command to execute')\nelse:\n\tcmds= cmd.split('\\n') \n\tips = re.split('[\\n ,;]*',ips)\n\t\n\tif un and ips and pwd:\n\t\t\n\t\tfor ip in ips: \n\t\t\t#log('Executing command on device %s ' %ip)\n\t\t\ttry:\n\t\t\t\tresp = executeCommand(ip,cmds,un,pwd)\n\n\t\t\t\tif jsonKeys:\n\t\t\t\t\tresp = getTheFieldValue(jsonKeys,resp)\n\n\t\t\texcept Exception as e:\n\t\t\t\tresp = 'Failed to execute command at device %s, reson: %s' %(ip,str(e))\n\t\t\t\n\t\t\tlog('%s\\t\\t%s' %(ip,str(resp)),False)\n\t\t\t#log('Executing command on device %s completed' %ip)\n\telse:\t\t\n\t\tfor ip in ips: \n\t\t\t#log('Executing command on device %s ' %ip)\n\t\t\ttry:\n\t\t\t\tresp = executeCommand(ip,cmds)\n\n\t\t\t\tif jsonKeys:\n\t\t\t\t\tresp = getTheFieldValue(jsonKeys,resp)\n\n\t\t\texcept Exception as e:\n\t\t\t\tresp = 'Failed to execute command at device %s, reson: %s' %(ip,str(e))\n\t\t\t\n\t\t\tlog('%s\\t\\t%s' %(ip,str(resp)),False)\n\t\t\t#log('Executing command on device %s completed' %ip)\n\t\n\n#Footer\nprint \"</body>\"\nprint \"</html>\"\n" }, { "alpha_fraction": 0.6327345371246338, "alphanum_fraction": 0.6372255682945251, "avg_line_length": 25.263158798217773, "blob_id": "db2a874908cd5829b64bfa8b56834a9cd7e010b4", "content_id": "f04b92caee0ba446c95079e863124d288a59d3cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2004, "license_type": "no_license", "max_line_length": 68, "num_lines": 76, "path": "/cgi/cgi/v2/Lib/vmdkstorage/vmdkManger.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n__author__='Pradeep CH'\n__version__ ='1.0.0'\n__date__= '29- Mar- 2017'\n\n\nimport os\nimport json\n\nfrom os.path import expanduser \nfrom getDataStore import getDataStore\n\nsourcedirectory = expanduser(\"~\") + \"/pat/\"\nvmdkFileRef = sourcedirectory +'vmdkmapper.db'\nvmInfoDirectory = sourcedirectory + 'vminfo/'\nvmdkSourceFolder = sourcedirectory + 'vmdks/'\n\ntargetPath = '/vmfs/volumes/%s/%s/'\n\ndef getDestinationPath(ip,un,pwd,vmname):\n\n if not os.path.exists(vmInfoDirectory):\n os.makedirs(vmInfoDirectory)\n data = readVmsInfo(ip,un,pwd)\n datastore = '' \n for key in data:\n if data[key]['name'] == vmname:\n datastore = data[key]['datastore']\n assert datastore!='', 'No info found'\n return targetPath%(datastore,vmname)\n\ndef readVmsInfo(ip,un,pwd,forceSynch=False):\n fname = vmInfoDirectory + ip+'vms.db'\n if not os.path.isfile(fname) or 
forceSynch:\n synchVM(ip,un,pwd)\n with open(fname,'r') as f:\n data = f.read()\n assert not data or data !='','VM Mapper not found' \n data = json.loads(data)\n return data\n\ndef synchVM(ip,un,pwd):\n fname = vmInfoDirectory + ip+'vms.db'\n\n data =getDataStore(ip,un,pwd)\n data = json.dumps(data)\n with open(fname,'w') as f:\n f.write(data)\n\ndef getVMDKFileLocation(ip,vmname):\n with open(vmdkFileRef,'r') as f:\n data = f.read()\n data = json.loads(data)\n\n if ip in data.keys():\n data = data[ip]\n else:\n data = data['default']\n\n source = data ['source']\n loc = ''\n if vmname in data.keys():\n loc = data[vmname]\n else:\n loc = data['default'] \n if source=='local':\n loc = vmdkSourceFolder+loc\n return [loc,source]\n\ndef getVMBaseInfo(ip,un,pwd,forceSynch):\n return readVmsInfo(ip,un,pwd,forceSynch)\n\n\ndef getVMDKInfo(ip,un,pwd,vmname): \n destPath = getDestinationPath(ip,un,pwd,vmname) \n [loc,source] = getVMDKFileLocation(ip,vmname)\n return {'destPath':destPath,'source':source,'sourceFilePath':loc}\n \n\n\n" }, { "alpha_fraction": 0.6162790656089783, "alphanum_fraction": 0.6162790656089783, "avg_line_length": 11, "blob_id": "003e6c10ef78683f9f91a96bcbf040b7ce0cd171", "content_id": "54698252e98c266e977e233ed891d4380ce6441a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 86, "license_type": "no_license", "max_line_length": 22, "num_lines": 7, "path": "/cgi/cgi/v2/Lib/htmlutil/__init__.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "__author__ =\"Pradeep\"\n__version__ ='develop'\n\n\nfrom HTML import HTML\n\nhtml = HTML()\n\n\n" }, { "alpha_fraction": 0.6517560482025146, "alphanum_fraction": 0.6540275812149048, "avg_line_length": 24.756755828857422, "blob_id": "fe153e75a5696f414e6176a29c08607836b6e47e", "content_id": "36ed9141fb1348c29569b13883d261835d0d8e03", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5723, "license_type": "no_license", "max_line_length": 82, "num_lines": 222, "path": "/cgi/cgi/v2/kvm/kvmmanger.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n \nimport sys\nimport os\nfrom os.path import expanduser \nimport re\nimport json\n\nsys.path.append('../Lib')\nfrom commonutil import getRandomId\n\nfrom commandexecutor import executeCommandSSH \n\nimport cgi, cgitb \n\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n\n\nsourcedirectory = expanduser(\"~\") + \"/pat/\"\n\nshowDeviceCmd = 'virsh list --all'\nstartDeviceCmd = 'virsh start %s'\nstopDeviceCmd = 'virsh destroy %s'\nserversPath = sourcedirectory + 'kvmservers.db'\n\n#create folder if its not exist\nif not os.path.exists(sourcedirectory):\n os.makedirs(sourcedirectory)\n open(serversPath,'w')\n\ndef loadDevices(ip,un,pwd):\n\tdata = executeCommandSSH(ip,showDeviceCmd,un,pwd);\n\tfullresp ={'status':'','data':''}\n\tif data:\n\t\tif type(data) == list: \n\t\t\tresp =[]\n\t\t\tfor item in data[1:]:\n\t\t\t\ttempitem =re.split('\\s+',item)\n\t\t\t\tif len(tempitem) < 4: \n\t\t\t\t\tcontinue \n\t\t\t\tresp.append({'id':tempitem[1],'device':tempitem[2],'status':tempitem[3]}) \t\t\t\t\n\t\t\tfullresp['status'] ='OK'\n\t\t\tfullresp['data'] = resp\n\t\telse: \n\t\t\tfullresp['status'] ='ERROR'\n\t\t\tfullresp['data'] = resp\n\telse:\n\t\tfullresp['status'] ='ERROR'\n\t\tfullresp['data'] = 'No response'\n\treturn fullresp\n\ndef start(ip,un,pwd,deviceName):\n\tcmd = startDeviceCmd %deviceName\n\tresult = 
executeCommandSSH(ip,cmd,un,pwd);\n\tresp =str(result)\n\tif type(result)== list:\n\t\tif len(result) >1:\n\t\t\treturn result[0]\n\t\treturn \"\".join(result)\n\treturn resp\n\ndef stop(ip,un,pwd,deviceName):\n\tcmd = stopDeviceCmd %deviceName \n\tresult = executeCommandSSH(ip,cmd,un,pwd);\n\tif type(result)== list:\n\t\tif len(result) >1:\n\t\t\treturn result[0]\n\t\treturn \"\".join(result)\n\treturn result\ndef stopAll(ip,un,pwd,devices):\n\tresp = ''\n\tdevices = devices.split(',')\n\tfor item in devices:\n\t\ttry: \n\t\t\tresp +='<br>' + str(stop(ip,un,pwd,item)) \n\t\texcept Exception as e:\n\t\t\tresp += '</br>Exception :' + str(e) \n\treturn resp\ndef startAll(ip,un,pwd,devices):\n\tresp = ''\n\tdevices = devices.split(',')\n\tfor item in devices:\n\t\ttry: \n\t\t\tresp +='<br>' + str(start(ip,un,pwd,item)) \n\t\texcept Exception as e:\n\t\t\tresp += '</br>Exception :' + str(e) \n\treturn resp\n\ndef readServers():\n\ttry:\n\t\tif not os.path.isfile(serversPath):\n\t\t\treturn {'status':'success','data':{}}\n\t\twith open(serversPath,'r') as f:\n\t\t\tdata = f.read()\n\t\t\tif data == '':\n\t\t\t\treturn {'status':'success','data':{}}\n\t\t\tjsonObj = json.loads(data)\n\t\t\tresp = [] \n\n\t\t\tfor key in jsonObj.keys():\n\t\t\t\tobj = jsonObj[key]\n\t\t\t\tobj['password']=''\n\t\t\t\tresp.append(obj)\n\t\t\t\n\t\t\treturn {'status':'success','data':resp}\n\n\texcept :\n\t\treturn {'status':'error','data':'Something went wrong'}\n\n#for internal purpose only\ndef readOrginalServer(uid):\n\tif not os.path.isfile(serversPath):\n\t\treturn\n\ttry:\n\t\twith open(serversPath,'r') as f:\n\t\t\tdata = f.read()\n\t\t\tif data == '':\n\t\t\t\treturn \n\t\t\tjsonObj = json.loads(data)\n\t\t\tif uid in jsonObj.keys(): \n\t\t\t\treturn jsonObj[uid] \n\texcept Exception as e:\n\t\t#TODO Log\n\t\treturn \n\ndef readServer(uid):\n\tserver = readOrginalServer(uid)\n\tif server:\n\t\tserver['password'] = ''\n\t\treturn {'status':'success','data':server}\n\t\t\n\telse:\n\t\treturn {'status':'error','data':'no data'}\n\ndef saveServer(ip,username,password,uid=None):\n\ttry:\n\t\tif not uid:\n\t\t\tuid = getRandomId()\n\n\t\tserver = {'ip':ip,'username':username,'password':password,'id':uid}\n\n\t\tservers = {} \n\t\tif os.path.isfile(serversPath): \t\t\n\t\t\twith open(serversPath,'r') as f:\n\t\t\t\tcontent= f.read()\t\t\t\n\t\t\t\tif not content == '': \n\t\t\t\t\tservers = json.loads(content) \n\n\t\twith open(serversPath,'w') as f: \n\t\t\tservers[uid]= server \n\t\t\tf.write(json.dumps(servers))\t\t\t\n\t\treturn 'Server saved successfully' \n\texcept Exception as e:\n\t\treturn 'Something went wrong. 
Error : %s' %(str(e)) \n\t\ndef getCurrentServer(uid):\n\tserver = readOrginalServer(uid)\n\tassert server, 'Server info not found'\n\treturn server\n\ndef main():\n\tform = cgi.FieldStorage() \n\tresp =''\n\tact = form.getvalue('action') \n\t\n\tuid = form.getvalue('uid')\n\t \n\n\tip = form.getvalue('ip')\n\tun = form.getvalue('uname')\n\tpwd =form.getvalue('pwd')\n\n\t#TODO validate\n\tif act=='loadDevices':\n\t\tserver = getCurrentServer(uid)\n\t\tresp =loadDevices(server['ip'],server['username'],server['password']) \n\t\tresp = json.dumps(resp)\n\telif act == 'stop':\n\t\tserver = getCurrentServer(uid)\n\t\tdname = form.getvalue('device')\n\t\tresp =stop(server['ip'],server['username'],server['password'],dname)\n\telif act == 'start':\n\t\tserver = getCurrentServer(uid)\n\t\tdname = form.getvalue('device')\n\t\tresp =start(server['ip'],server['username'],server['password'],dname)\n\telif act=='restart':\n\t\tserver = getCurrentServer(uid)\n\t\tdname = form.getvalue('device')\n\t\tresp +=stop(server['ip'],server['username'],server['password'],dname)\n\t\tresp +=start(server['ip'],server['username'],server['password'],dname)\n\telif act =='startall':\n\t\tserver = getCurrentServer(uid)\n\t\tdnames = form.getvalue('devices')\n\t\tresp +=startAll(server['ip'],server['username'],server['password'],dnames)\n\telif act =='stopall':\n\t\tserver = getCurrentServer(uid)\n\t\tdnames = form.getvalue('devices')\n\t\tresp +=stopAll(server['ip'],server['username'],server['password'],dnames)\n\telif act =='restartall':\n\t\tserver = getCurrentServer(uid)\n\t\tdnames = form.getvalue('devices')\n\t\tresp +=stopAll(server['ip'],server['username'],server['password'],dnames)\n\t\tresp +=startAll(server['ip'],server['username'],server['password'],dnames)\n\telif act=='loadServers':\n\t\tresp =readServers()\n\t\tresp = json.dumps(resp)\n\telif act=='loadServer': \n\t\tresp =readServer(uid)\n\t\tresp = json.dumps(resp) \n\telif act=='add' or act=='update': \n\t\tresp =saveServer(ip,un,pwd,uid) \t\t\t\n\telse:\n\t\tresp = 'Invalid action' \n\tprint resp\n\nif __name__ == \"__main__\":\n print 'Content-type:text/html\\r\\n'\n try:\n main()\n except Exception as e :\n print json.dumps({'status':'ERROR','data':'Exception %s' %str(e)})\n \n\n\n" }, { "alpha_fraction": 0.5774385929107666, "alphanum_fraction": 0.5830230712890625, "avg_line_length": 19.806201934814453, "blob_id": "7c8a244dfe78cf1037c0c5422e185b4e5ea2aaf1", "content_id": "14fe5fa8ff24fbdb20e38a9b93e9265358ffe4e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2686, "license_type": "no_license", "max_line_length": 72, "num_lines": 129, "path": "/cgi/cgi/v2/IPSweep/ipsweep.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n\nimport multiprocessing\nimport subprocess\nimport sys\nimport os\nsys.path.append('../Lib')\n\nfrom htmlutil import html\nfrom historyManger import saveHistory\nfrom IPAddressManager import parser\n# Import modules for CGI handling \nimport cgi, cgitb \n\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n\n\n\ndef printPage(content):\n\thtml.printHeader(\"IP Sweep\") \n\thtml.printBodyContent(content) \n\ndef pinger( job_q, results_q ):\n DEVNULL = open(os.devnull,'w')\n while True:\n ip = job_q.get()\n if ip is None: break\n\n try:\n subprocess.check_call(['ping','-c1',ip],\n stdout=DEVNULL)\n results_q.put(ip)\n except:\n pass\n\ndef log(msg='',breakReq=True):\n msg.replace(' ','&nbsp;')\n if breakReq:\n msg += '</br>'\n print 
msg\n\ndef main(ips):\n\n ips =parser.parse(ips)\n aips = []\n\n assert ips, 'No ips to be validated'\n \n total = len(ips)\n \n pool_size = 20\n \n assert total > 0, 'No ips to be validated' \n\n jobs = multiprocessing.Queue()\n results = multiprocessing.Queue()\n\n pool = [ multiprocessing.Process(target=pinger, args=(jobs,results))\n for i in range(pool_size) ]\n\n for p in pool:\n p.start()\n\n for ip in ips:\n jobs.put(ip)\n\n for p in pool:\n jobs.put(None)\n\n for p in pool:\n p.join()\n\n success=0\n\n if results:\n success = results.qsize()\n content =''\n #summary\n #content +='Start IP : %s' %ipstart\n #content += 'Last IP : %s ' %ipend\n\n content +='Total IPs validated : %d ' %total\n content +='</br>' \n content +='Active IPs count : %d' %success\n content +='</br>' \n content +='Inactive IPs : %d' %(total-success) \n\n while not results.empty():\n \tip = results.get()\n aips.append(ip)\n\n inactiveIps=[ i for i in ips if i not in aips]\n\n #log('Inactive IPS :')\n #log(str(aips))\n \n #log('Inalive IPS :')\n #log(str(inactiveIps))\n aips.sort()\n inactiveIps.sort()\n\n \n content +='<h4>Active IPs</h4>' \n content +='</br>'.join(aips) \n content +='<h4>Inactive IPs</h4>'\n content +='</br>'.join(inactiveIps) \n try: \n\tipstart= ips[0]\n\tipend =ips[len(ips)-1] \n saveHistory(content,ipstart,ipend)\n except Exception as e:\n\tcontent += str(e)\n\tpass\n printPage(content)\n# Create instance of FieldStorage \nform = cgi.FieldStorage() \n\n# Get mode from fields \nips = form.getvalue('ips') \nif not ips:\n\tendip =form.getvalue('endip') \n\tstartip =form.getvalue('startip') \n\tif endip and startip:\n\t\tips = startip +'-'+endip\ntry:\n main(ips) \nexcept Exception as e:\n printPage('Exception '+str(e))\n\n\n" }, { "alpha_fraction": 0.6385809183120728, "alphanum_fraction": 0.646710991859436, "avg_line_length": 22.736841201782227, "blob_id": "44761052d08c0e44825fbad5b7e262fae340eb8e", "content_id": "0b292b0f0ab58d14dd05d4363fe74bd2d67cf332", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1353, "license_type": "no_license", "max_line_length": 93, "num_lines": 57, "path": "/cgi/cgi/v2/IPSweep/historyView.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\nimport sys\nimport os\nimport re\nimport json\nimport cgi, cgitb \n\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n\n\n#sys.path.append('../Lib') \nfrom historyManger import getHistory,readHistory\n\ntargetUrl = '#'\ndef printPage(content):\n\t#html.printHeader(\"IP History\") \n\t#html.printBodyContent(content) \n\tprint 'Content-type:text/html\\r\\n\\r\\n'\n\tprint json.dumps(content)\n\ndef main():\n\tform = cgi.FieldStorage() \n\tact = form.getvalue('action') \n\tcontent = ''\n\tif act=='history':\n\t\tcontent = getFullHistory()\n\telif act=='loadhistory':\n\t\tfname = form.getvalue('fname') \n\t\tcontent = {'status':'success','data':readHistory(fname)}\n\telse:\n\t\tcontent ={'status':'error','data': 'Invalid action'}\n\tprintPage(content)\n\ndef getFullHistory(): \n\tcontent =''\n\ttry:\n\t\tfullcontent = []\n\t\thistory = getHistory() \n\t\tif len(history) ==0:\n\t\t\tfullcontent = []\n\t\telse: \n\t\t\tfor item in history: \n\t\t\t\tips = item.strip().split(' ') \n\t\t\t\ttry:\n\t\t\t\t\tassert len(ips) ==5, 'Inavlid entry at history tracker'\t\t\t\t\n\t\t\t\t\tcontent = {'fileName':ips[0],'startIp':ips[1],'endIp':ips[2],'date':ips[3]+' ' + 
ips[4]}\n\t\t\t\t\tfullcontent.append(content)\n\t\t\t\texcept:\n\t\t\t\t\tpass\n\t\tfullcontent.reverse()\n\t\tcontent = {'status':'success','data':fullcontent}\n\texcept Exception as e:\n\t\tcontent ={'status':'error','data':'Something went wrong.'}\n\treturn content\nif __name__ == \"__main__\":\n\tmain()\n" }, { "alpha_fraction": 0.6252175569534302, "alphanum_fraction": 0.6321794390678406, "avg_line_length": 28.890172958374023, "blob_id": "112a462d6b3e97ae6e5b66fcc0a026d675c6fcbb", "content_id": "65bce11e245a6d00b9628064dddb8ca6b9032a18", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 5171, "license_type": "permissive", "max_line_length": 188, "num_lines": 173, "path": "/v2/js/DeviceAllocator/Hypervisor/VMWare_manager.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 16-Jun-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n \ncheckParam();\n\nfunction checkParam(){ \n\tuid = getParamValue(document.location.href,'uid') \n\tip = getParamValue(document.location.href,'ip') \n\tif(uid===undefined){\n\t\tshowProcess('Requested information not found. Please try again.')\n\t\treturn\n\t} \n\t$('#serverId').val(uid)\n\t$('.serverIP').html(ip)\n\tshowProcess('Loading devices. Please wait..')\n\tgetDevices(uid)\n}\n \nfunction getDevices(uid){ \n\ttarget =cgiPath + 'DeviceManager/Hypervisor/vmwaremanager.py'\n\tdata = {'action':'loadDevices','ip':'','uname':'','pwd':'','uid':uid}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadDevices)\n}\n\nfunction loadDevices(resp){ \n\tvar obj = jQuery.parseJSON(resp);\n\t$(\"#devices\").empty(); \n\tif(obj['status']==='success'){\n\t\taddHeader()\n\t\t$(obj['data']).each(function(){ \n\t\t\taddRow(this) \n\t\t});\n\t}else{\n\t\tshowResp(obj['data'])\n\t}\n\tshowProcessMsg('Devices loaded successfully.')\n}\n\nfunction addHeader(){ \n\t$('#devices').append('<tr><th><th><th>id</th><th>Name</th><th>Memory(MB)</th><th>vCPU</th><th>Status</th><th colspan=4></th></tr>')\n}\n\nfunction addRow(val){\n\t\tcheckBox = '<td><td><input type=\"checkbox\" class=\"select\" moid=\"'+val['id']+'\"/ ></td>'\n\t\tid = '<td>'+val['id']+'</td>'\n\t\tname = '<td>'+val['name']+'</td>'\n\t\tstatus = '<td>'+val['status']+'</td>'\n\t\tram = '<td>'+val['memory']+'</td>'\n\t\tcpu = '<td>'+val['cpu']+'</td>'\n\t\tstartLink = '<td><a href=\"#\" moid=\"'+ val['id']+'\" action =\"start\" class=\"dact\">Start</a></td>'\n\t\tstopLink = '<td><a href=\"#\" moid=\"'+ val['id']+'\" action =\"stop\" class=\"dact\">Stop</a></td>'\n\t\trestartLink = '<td><a href=\"#\" moid=\"'+ val['id']+'\" action =\"restart\" class=\"dact\">Restart</a></td>'\n\t\tconsoleLink = '<td><a href=\"#\" name=\"'+ val['name']+'\" action =\"console\" moid='+val['id']+' class=\"console\">Console</a></td>'\n\t\tdetailLink = '<td><a href=\"VMWare_Node_details.htm?name='+val['name']+'&serverId='+$('#serverId').val()+'&serverIp='+$('.serverIP').html()+'\" target=\"_default\" >'+val['name']+'</a></td>'\n\t\t$('#devices').append('<tr>' +checkBox+id+detailLink+ram+cpu+status+startLink+stopLink+restartLink+consoleLink+'</tr>'); \t\t\t\n}\n\n\n//Actions\n$(document).on('click', '.console', function (event) { \n\tshowProcess('Connecting to remote console. 
Please wait..')\n\tevent.preventDefault(); \n\tmoid = $(this).attr('moid')\n\ttarget =cgiPath + 'DeviceManager/Hypervisor/vmwaremanager.py'\n\tdata = {'action':'console','uid':uid,'moid':moid}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,showConsoleResp)\n});\n\n$(document).on('click', '.macList', function (event) { \n event.preventDefault(); \n window.location.href = \"managemac.htm?uid=\"+$('#serverId').val()+\"&ip=\"+$('.serverIP').html();\n});\n\n$(document).on('click', '.dact', function (event) { \n\tshowProcess('Performing action. Please wait..')\n\tevent.preventDefault(); \n\tmoid = $(this).attr('moid')\n\taction = $(this).attr('action')\n\tuid = $('#serverId').val()\n\ttarget =cgiPath + 'DeviceManager/Hypervisor/vmwaremanager.py'\n\tdata = {'action':action,'moid':moid,'uid':uid}\n\tmethod ='POST' \n\t$.triggerCall(target,method,data,showActionResp)\n});\n\n$(document).on('click', '#refresh', function (event) { \n\tgetDevices($('#serverId').val() )\n\tshowProcess('Refreshing..') \n});\n\n\n$(document).on('click', '.resetAct', function (event) { \n\tshowProcess('Performing action. Please wait..')\n\taction = $(this).attr('action')\n\tdevices= getSelectedDevices()\n\tconsole.log(devices.length)\n\tif(devices.length==0){ \n\t\treturn;\n\t} \n\tshowProcess('Loading response..')\n\tmethod ='POST'\n\ttarget =cgiPath + 'DeviceManager/Hypervisor/vmwaremanager.py'\n\tdata = {'action':action,'devices':devices.join(),'uid':uid} \n\t$.triggerCall(target,method,data,showActionResp)\n});\n\nfunction getSelectedDevices(){ \n\tmoids= []\n\t$('.select').each(function(i, obj) {\n\t\tif(obj.checked){ \t\t\t\t\n\t\t\tmoids.push($(obj).attr('moid'))\n\t\t}\n\t});\n\t\t\t\t\n\tif(moids.length==0){\n\t\tshowProcessMsg('No device(s) selected') \n\t} \n console.log(moids)\n return moids\n}\n$(document).on('click', '.allAct', function (event) { \n\t\tshowProcess('Performing action. 
Please wait..')\n\t\taction = $(this).attr('action') \n\t\tmoids= getSelectedDevices()\t \n\t\t\t\t\n\t\tif(moids.length==0){ \n\t\t\treturn;\n\t\t} \n\n\t\tshowProcess('Loading response..')\n\t\tmethod ='POST'\n\t\ttarget =cgiPath + 'DeviceManager/Hypervisor/vmwaremanager.py'\n\t\tdata = {'action':action,'moids':moids.join(),'uid':uid} \n\t\t$.triggerCall(target,method,data,showActionResp)\n});\n\nfunction showResp(resp){\n\t$('#consoleContent').html(resp).fadeIn(1000).fadeOut(3000)\n}\nfunction showProcess(resp){\n\t$('#loading').html(resp).fadeIn(1000)\n}\nfunction showProcessMsg(resp){\n\t$('#loading').html(resp).fadeIn(1000).fadeOut(3000)\n}\nfunction showConsoleResp(resp){\n\tif(resp ==null || resp===\"\"){\n\t\tshowProcessMsg('Invalid Response.')\n\t\treturn;\n\t}\n\tvar obj = jQuery.parseJSON(resp);\n\tshowProcessMsg(obj['data'])\n}\nfunction showActionResp(resp){\n\tif(resp ==null || resp===\"\"){\n\t\tshowProcessMsg('Invalid Response.')\n\t\treturn;\n\t}\n\tvar obj = jQuery.parseJSON(resp);\t \n\tshowResp(obj['data']) \n if(obj['status']==='error'){\n return;\n }\n\tgetDevices($('#serverId').val());\n\tshowProcess('Refreshing..') \n}\n" }, { "alpha_fraction": 0.607207179069519, "alphanum_fraction": 0.607207179069519, "avg_line_length": 24, "blob_id": "5c73d96e852284665fc09c24321552321c15dcd4", "content_id": "7196c11720b7a972bac19088aa7ca3d278ff2935", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 555, "license_type": "no_license", "max_line_length": 46, "num_lines": 22, "path": "/cgi/cgi/v2/Lib/patlogger/beans.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\nimport datetime\nimport os\nfrom os.path import expanduser \n\n\nsourcedirectory = expanduser(\"~\") + \"/pat/\"\ndirectory = sourcedirectory +'logs/'\nlogFormat ='\\r\\n%s %s %s' \n\n\nclass PATLogger(object):\n def __init__(self,sourceName):\n self.clzz = sourceName\n self.fname = directory+sourceName+'.log'\n def log(self,msg,mode='debug'):\n t= str(datetime.datetime.now())\n log = logFormat %(t,self.clzz,msg)\n wm = 'w'\n if os.path.exists(self.fname):\n wm = 'a'\n with open(self.fname,wm) as f:\n f.write(log)\n\n\n \n" }, { "alpha_fraction": 0.6228373646736145, "alphanum_fraction": 0.624567449092865, "avg_line_length": 22.97916603088379, "blob_id": "7745505fbd09c39d37fee5e3dfe11d4957058b13", "content_id": "6c2cb26f3b132c76c94b2dcd5b2b32aed916888a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1156, "license_type": "no_license", "max_line_length": 63, "num_lines": 48, "path": "/cgi/cgi/v2/DeviceManager/SystemStatus/manager.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n \nimport sys\nimport os\nfrom os.path import expanduser \nimport re\nimport json\nimport cgi\nimport subprocess\nsys.path.append('../../Lib')\nfrom ParamikkoUtil import checkAuthentication\n\ndef check(ip,un,pwd):\n try:\n DEVNULL = open(os.devnull,'w')\n subprocess.check_call(['ping','-c1',ip],\n stdout=DEVNULL) \n except:\n\treturn {'status':'success','data':'Not Reachable'}\n if not checkAuthentication(ip,un,pwd):\n\treturn {'status':'success','data':'Authentication Failure'}\n\n return {'status':'success','data':'Online'}\n\ndef main():\n\tform = cgi.FieldStorage() \n\tact = form.getvalue('action') \n\t \n\tresp =''\n \n\tif act == 'statusServer':\n\t\tun = form.getvalue('uname') \n\t\tip = form.getvalue('ip') \n\t\tpwd = form.getvalue('pwd') \n\t\tassert un and 
pwd and ip, 'Provide IP Username and Password' \n\t\tresp=check(ip,un,pwd)\n\telse:\n\t\tresp = {'status':'error','data':'Invalid action'} \n\t\n\tresp = json.dumps(resp)\t\t\n\tprint resp\n\nif __name__ == \"__main__\":\n print 'Content-type:text/html\\r\\n'\n try:\n main()\n except Exception as e :\n print json.dumps({'status':'error','data':'%s' %str(e)})\n \n\n\n" }, { "alpha_fraction": 0.6337355375289917, "alphanum_fraction": 0.6349924802780151, "avg_line_length": 28.909774780273438, "blob_id": "35b64a13a4a48ed089809e7e930d1e437c07246d", "content_id": "8184c752a923b5705e912ed1c96d169cb9cf829a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3978, "license_type": "no_license", "max_line_length": 162, "num_lines": 133, "path": "/cgi/cgi/v2/library/manage.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n# Import modules for CGI handling \nimport cgi, cgitb \nimport os\nfrom os.path import expanduser \nimport sys\nimport json\nimport datetime\n\nsys.path.append('../Lib') \n\nfrom sqldb import db\n\n#\n#Author Pradeep CH\n#\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n \nsourcedirectory = expanduser(\"~\") + \"/pat/\"\nsourcePath = sourcedirectory + 'properties'\nmediaPath =''\nURLMediaPath =''\n\n\n#Extract the storage location\nwith open(sourcePath,'r') as f:\n lines = f.readlines()\n for line in lines:\n line.startswith('MEDIAPATH')\n parts = line.split(' ')\n mediaPath = parts[1]\n URLMediaPath =parts[2]\n\n\ndef storeFile(fileName,data):\n with open(fileName,'wb') as f:\n f.write(data)\ndef addEntry(fileName,subject,description,path,mediaType): \n db.addEntry({'filename':fileName,'subject':subject,'desc':description,'path':path,'date':str(datetime.datetime.now().date()),'mediatype':mediaType},'Library');\n\ndef upload(form): \n name = form.getvalue('name')\n destPath = mediaPath + name\n assert not os.path.exists(destPath), 'File already exist'\n try:\n uploadedFile = form['sourcefile']\n subject = form.getvalue('subject')\n desc = form.getvalue('desc')\n mediaType = form.getvalue('mediatype')\n assert subject and desc, 'Subject and description canot be empty'\n fileName = uploadedFile.filename \n assert fileName, 'Invalid file name'\n destPath = mediaPath + name\n urlPath = URLMediaPath + name\n storeFile(destPath,uploadedFile.file.read())\n addEntry(name,subject,desc,urlPath,mediaType)\n return {'status':'success','data':'File upload successfull'}\n except Exception as e: \n try:\n os.remove(destPath)\n except Exception as f: \n pass\n return {'status':'error','data':'File upload failed. 
Reson :' + str(e)}\n\ndef search(keyword):\n searchKeySet = ['filename','subject','desc','date']\n keySet = ['filename','subject','desc','path','date','id','mediatype']\n condition = ''\n if not keyword:\n keyword = ''\n for key in searchKeySet:\n if condition != '':\n condition +=' or '\n\n condition += key+' like '+'\"%'+keyword+'%\"'\n \n data =db.getDataWithCondition('Library',keySet,condition)\n return {'status':'success','data':data}\ndef download(fname):\n locaPath = mediaPath + fname\n assert os.path.exists(locaPath), 'Requested file does not exist' \n try: \n with open(locaPath,'r') as f:\n data = f.read()\n print 'Content-Disposition: attachment; filename=\"%s\"' % fname\n #print \"Content-Length: \" + str(os.stat(fullPath).st_size)\n print # empty line between headers and body\n print data\n\n except Exception as e:\n print \"Content-type:text/html\\r\\n\\r\\n\"\n print 'Unexpected error occured. %s' %str(e)\n\ndef delete(fname):\n locaPath = mediaPath + fname\n #assert os.path.exists(locaPath), 'Requested file does not exist' \n try:\n db.performAction('Library','delete',' filename like \"'+fname+'\";')\n os.remove(locaPath)\n return {'status':'success','data':'File deleted'}\n except Exception as e:\n return {'status':'error','data':'File may be already deleted or currepted'}\n\ndef main():\n form = cgi.FieldStorage() \n action = form.getvalue('action')\n #resp = json.loads(\"{}\")\n if action=='upload':\n resp = upload(form)\n elif action == 'search':\n keyword = form.getvalue('keyword')\n resp = search(keyword) \n elif action=='download':\n fname= form.getvalue('fname')\n download(fname)\n elif action=='delete':\n fname= form.getvalue('fname')\n resp = delete(fname)\n else:\n resp = {'status':'error','data':'invalid action'}\n\n if action!='download':\n print 'Content-type:text/html\\r\\n'\n print json.dumps(resp)\nif __name__=='__main__':\n \n try:\n main()\n except Exception as e:\n print 'Content-type:text/html\\r\\n'\n print json.dumps({'status':'error','data':'Exception ' +str(e)})\n" }, { "alpha_fraction": 0.7345254421234131, "alphanum_fraction": 0.7510316371917725, "avg_line_length": 33.380950927734375, "blob_id": "4136a3335a9dd8370ffeb6b6fad9b0ac38fa9b6d", "content_id": "32e846cf26ea1f174348f86ef9512478d69e8e82", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 727, "license_type": "no_license", "max_line_length": 109, "num_lines": 21, "path": "/README.md", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "# PAT\nPAT is an application developed with HTML, JS as frontend and Python as backend. It uses CGI to communicate. \n\n# Features\nPAT is multi utility application. PAT allows\n* To Generate/SIGN SSL certificate \n* To manage DHCP with VMware/Proxmos\n* Reset/Authentication check ARISTA EOS nodes\n* Image push and config push to EOS devices using eAPI/SSH\n* Easy remote SSH\n* Open VMWare node console in your machine from PAT server\n\n# Usage\n* Install java\n* Install python\n* Set JAVA_HOME or JRE_HOME\n* Change context to enable CGI and set python path\n* Copy \"v2\" folder to '/webapps/pat/' folder\n* Copy \"cgi\" to the configured cgi poth. 
Example /webapps/pat/WEB_INF/cgi\n* Start tomcat \nPAT URL : http://127.0.0.1:8080/pat/v2\n\n\n\n\n\n" }, { "alpha_fraction": 0.6918568015098572, "alphanum_fraction": 0.6962755918502808, "avg_line_length": 31.12753677368164, "blob_id": "5d796ee07f5aac6b24ea0875c693dd9054366004", "content_id": "93e31d3b4559d0be6813785e9f7dd322b52c3787", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11089, "license_type": "no_license", "max_line_length": 261, "num_lines": 345, "path": "/cgi/cgi/v2/ssl/manager.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n \nimport sys\nimport os\nimport subprocess\nimport datetime\nimport zipfile\nimport shutil\n\nsys.path.append('../Lib')\nimport re\nimport json\n\nfrom commandexecutor import executeCommandSSH \n\nimport cgi, cgitb \n\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n\nparentFolder ='resources'\ncaFile ='%s/caList' %parentFolder \ncaCertLoca = 'ROOT'\ncsrReqLoc = 'CSR'\nuserCertLoc = 'USERCERTS'\nsignedCertLoc = 'SIGNEDCERT'\ndeviceCertLoc = 'DEVICECERT'\n\nsampleConfigFileLocation = '%s/openssl.cnf' %parentFolder;\ncadirIdentifier = '<CA ROOT DIR>'\nprivateKeyRef = '<privatekey>'\npublicKeyRef = '<publickey>'\n\ndef log(msg,level='DEBUG'):\n\twith open('log','a') as f:\n\t\tfmsg = '\\n%s\\t%s\\t%s' %(str(datetime.datetime.now()) ,level, str(msg))\n\t\tf.write(fmsg)\n\ndef loadCA():\t\t \n\tresp = '{}'\n\tif not os.path.exists(caFile):\n\t\topen(caFile,'w').write('[]')\t\t\n\ttry:\n\t\twith open(caFile,'r') as f:\n\t\t\tdata = f.read()\n\t\t\tdata = json.loads(data) \n\t\t\tresp = {'status':'SUCCESS','data':data}\n\texcept Exception as e:\n\t\tresp ={'status':'ERROR','data':str(e)}\n\treturn json.dumps(resp);\n\ndef createFolderStructure(cn):\n\tdirectory = '%s/%s' %(parentFolder,cn)\n\t\n\tif not os.path.exists(directory):\n\t\tos.makedirs(directory)\n\t\tos.makedirs('%s/%s' %(directory,caCertLoca))\n\t\tos.makedirs('%s/%s' %(directory,csrReqLoc))\n\t\tos.makedirs('%s/%s' %(directory,userCertLoc))\n\t\tos.makedirs('%s/%s' %(directory,deviceCertLoc))\n\t\tos.makedirs('%s/%s' %(directory,signedCertLoc))\n\t\n\twith open('%s/serial' %directory,'w') as f:\n\t\tf.write('1000')\n\twith open('%s/index.txt' %directory,'w') as f:\n\t\tpass\n\t\ndef copyConfigFile(cn):\n\tcaLoc = '%s/%s' %(parentFolder,cn)\t\n\tprivateKeyLoc = '%s/%s-private.key' %(caCertLoca,cn)\n\tpublicKeyLoc = '%s/%s-public.cert' %(caCertLoca,cn)\n\tconfigFileLocation = '%s/%s/openssl.conf' %(parentFolder,cn)\n\t\n\twith open(sampleConfigFileLocation,'r') as f:\n\t\tdata = f.read()\n\t\tassert data !=None and len(data)>0, 'No config file'\n\t\tdata = data.replace(cadirIdentifier,caLoc)\n\t\tdata = data.replace(privateKeyRef,privateKeyLoc)\n\t\tdata = data.replace(publicKeyRef,publicKeyLoc) \n\n\t\twith open(configFileLocation,'w') as cFile:\n\t\t\tcFile.write(data)\n\ndef addToCAList(cn,org,orgUnit,country,email,state,locality): \n\tdata =None\n\twith open(caFile,'r') as f:\n\t\tdata = f.read()\n\twith open(caFile,'w') as f:\n\t\tif data ==None or data == '':\n\t\t\tdata =[]\n\t\telse:\n\t\t\tdata = json.loads(data) \n\t\tdata.append({'cn':cn,'org':org,'orgUnit':orgUnit,'country':country,'email':email,'state':state,'locality':locality})\n\t\tf.write(json.dumps(data) )\n\n\t\t\ndef addCA(cn,email,org,country,state,locality,orgUnit,update=False):\n\tdirectory = '%s/%s' %(parentFolder,cn)\n\trootDirectory = '%s/ROOT' %directory\n\tisFolderExist = os.path.exists(directory)\n\tif isFolderExist 
and not update:\n \t\treturn {'status':'ERROR','data':'Common name already exist'}\n\telif not isFolderExist:\n\t\tcreateFolderStructure(cn);\n\t\n\tbashCommand = \"openssl req -subj '/CN=%s/O=%s/OU=%s/C=%s/emailAddress=%s/ST=%s/L=%s' -new -newkey rsa:2048 -days 365 -nodes -x509 -keyout %s/%s-private.key -out %s/%s-public.cert\" %(cn,org,orgUnit,country,email,state,locality,rootDirectory,cn,rootDirectory,cn)\n\ttry: \n\t\toutput = executeCommand(bashCommand)\n\t\tif output:\n\t\t\tcopyConfigFile(cn)\n\t\t\taddToCAList(cn,org,orgUnit,country,email,state,locality)\n\t\t\treturn {'status':'SUCCESS','data':'Cert successfully created. Actual resp : ' + str(output)} \n\t\telse:\n\t\t\treturn {'status':'ERROR','data':'Cert creation failed' + output} \t\t\t\n\texcept Exception as e:\t\n\t\ttry:\n\t\t\t#delete the folder\n\t\t\tshutil.rmtree(directory)\n\t\texcept:\n\t\t\tpass\t\n\t\treturn {'status':'ERROR','data':'Exception %s' %str(e)} \n\ndef generateUserCert(ca,un):\t\t\n\ttry: \n\t\tdownloadFileName = '%s-user.zip' %un\n\t\t\n\t\tprivateKeyName = '%s-private.pem' %un\n\t\tcertName = '%s-cert.pem' %un\n\n\t\t[userCSRLoc,privateKeyLoc] = createUserCsr(ca,un) \n\t\tsignedCertPath = signUserCSR(userCSRLoc,ca,un) \n\t\tfiles = [{'file':privateKeyLoc,'fileName':privateKeyName},{'file':signedCertPath,'fileName':certName}]\n\t\tzipFile = zipFiles(files,downloadFileName)\n\t\tinitiateFileDownload(zipFile,downloadFileName,True) \n\t\ttry:\n\t\t\tos.remove(signedCertPath)\n\t\texcept:\n\t\t\tpass\n\texcept Exception as e: \n\t\treturn {'status':'ERROR','data':'Exception %s' %str(e)} \n\ndef generateDeviceCert(ca,deviceName):\t\t\n\ttry: \n\t\tdownloadFileName = '%s-device.zip' %deviceName\n\t\t\n\t\tprivateKeyName = '%s-private.pem' %deviceName\n\t\tcertName = '%s-cert.pem' %deviceName\n\n\t\t[deviceCSRLoc,privateKeyLoc] = createDeviceCsr(ca,deviceName) \n\t\tsignedCertPath = signDeviceCSR(deviceCSRLoc,ca,deviceName) \n\t\tfiles = [{'file':privateKeyLoc,'fileName':privateKeyName},{'file':signedCertPath,'fileName':certName}]\n\t\tzipFile = zipFiles(files,downloadFileName)\n\t\tinitiateFileDownload(zipFile,downloadFileName,True) \n\t\ttry:\n\t\t\tos.remove(signedCertPath)\n\t\texcept:\n\t\t\tpass\n\texcept Exception as e: \n\t\treturn {'status':'ERROR','data':'Exception %s' %str(e)} \n\ndef zipFiles(files,fileName):\n\tfullPath = '%s/%s' %(parentFolder,fileName)\n\twith zipfile.ZipFile(fullPath, \"w\") as f:\n\t\tfor fileItem in files:\n\t\t\tf.write(fileItem['file'],fileItem['fileName'])\n\treturn fullPath\n\ndef download(cn,act):\n\trootDirectory = '%s/%s/ROOT' %(parentFolder,cn)\n\tfullPath = ''\n\tdownloadFileName = ''\n\n\tif act == 'downloadPrivate':\n\t\tdownloadFileName = '%s-private.key' %cn \n\telse:\n\t\tdownloadFileName = '%s-public.cert' %cn \n\n\tfullPath = '%s/%s' %(rootDirectory,downloadFileName)\n\n\ttry:\n\t\tinitiateFileDownload(fullPath,downloadFileName,False) \n\texcept Exception as e:\n\t\tprint \"Content-type:text/html\\r\\n\\r\\n\"\n\t\tprint 'Unexpected error occured. 
%s' %str(e)\n\ndef executeCommand(cmd):\n\t\n\toutput = subprocess.check_call(cmd, shell=True) \n\tlog('Command %s :::: Output %s' %(str(cmd),str(output)))\n\treturn output==0 \n\ndef storeCSR(csr,csrName,caDir):\n\tcsrFileName = '%s/%s/%s' %(caDir,csrReqLoc,csrName)\n\twith open(csrFileName,'wb') as f:\n\t\tf.write(csr.file.read())\n\treturn csrFileName\n\ndef signCSR(csrFile,caLoc,targetFileName,validity=\"375\"):\n\tconfFileName = '%s/openssl.conf' %(caLoc)\n\ttargetFile = '%s/%s/%s' %(caLoc,signedCertLoc,targetFileName)\n\tcmd = 'openssl ca -batch -config %s -extensions server_cert -days %s -notext -md sha256 -in %s -out %s' %(confFileName,validity,csrFile,targetFile) \n\tif executeCommand(cmd):\n\t\treturn targetFile\n\ndef signUserCSR(userCertLoc,ca,un):\n\ttargetFileName = '%s_user_cert.pem' %(un)\n\tcaLoc ='%s/%s' %(parentFolder,ca)\n\tconfFileName = '%s/openssl.conf' %(caLoc)\n\ttargetLocation = '%s/%s/%s' %(caLoc,signedCertLoc,targetFileName)\n\n\tcmdSign = 'openssl ca -batch -config %s -extensions usr_cert -days 375 -notext -md sha256 -in %s -out %s' %(confFileName,userCertLoc,targetLocation)\n\tif executeCommand(cmdSign):\n\t\treturn targetLocation\n\ndef createUserCsr(ca,un): \n\tglobal parentFolder,userCertLoc\n\tuserDirctory = '%s/%s/%s' %(parentFolder,ca,userCertLoc) \n\n\tprivateKeyLoc = '%s/%s_privkey.pem' %(userDirctory,un) \n\tuserCSRLoc = '%s/user-%s.csr' %(userDirctory,un)\n\n\tcmdCsr = 'openssl req -new -newkey rsa:2048 -nodes -subj \"/CN=%s/O=PAYODA/OU=ARISTA/C=IN/[email protected]/ST=CBE/L=CBE\" -keyout %s -out %s' %(un,privateKeyLoc,userCSRLoc)\n\tif executeCommand(cmdCsr):\n\t\treturn [userCSRLoc,privateKeyLoc]\n\ndef signDeviceCSR(deviceCertLoc,ca,deviceName):\n\ttargetFileName = '%s_user_cert.pem' %(deviceName)\n\tcaLoc ='%s/%s' %(parentFolder,ca)\n\tconfFileName = '%s/openssl.conf' %(caLoc)\n\ttargetLocation = '%s/%s/%s' %(caLoc,signedCertLoc,targetFileName)\n\n\tcmdSign = 'openssl ca -batch -config %s -extensions server_cert -days 375 -notext -md sha256 -in %s -out %s' %(confFileName,deviceCertLoc,targetLocation)\n\tif executeCommand(cmdSign):\n\t\treturn targetLocation\n\ndef createDeviceCsr(ca,deviceName): \n\tglobal parentFolder,userCertLoc\n\tdeviceDirctory = '%s/%s/%s' %(parentFolder,ca,deviceCertLoc) \n\n\tprivateKeyLoc = '%s/%s_privkey.pem' %(deviceDirctory,deviceName) \n\tuserCSRLoc = '%s/device-%s.csr' %(deviceDirctory,deviceName)\n\n\tcmdCsr = 'openssl req -new -newkey rsa:2048 -nodes -subj \"/CN=%s/O=PAYODA/OU=ARISTA/C=IN/[email protected]/ST=CBE/L=CBE\" -keyout %s -out %s' %(deviceName,privateKeyLoc,userCSRLoc)\n\tif executeCommand(cmdCsr):\n\t\treturn [userCSRLoc,privateKeyLoc]\n\n\ndef initiateFileDownload(filePath,downloadFileName,remove=False):\n\tif filePath:\n\t\tdata = '' \n\t\twith open(filePath,'r') as f:\n\t\t\tdata = f.read()\n\n\t\tprint 'Content-Disposition: attachment; filename=\"%s\"' % downloadFileName\n\t\t#print \"Content-Length: \" + str(os.stat(fullPath).st_size)\n\t\tprint # empty line between headers and body\n\t\tprint data\n\t\ttry:\n\t\t\tif remove:\n\t\t\t\tos.remove(filePath)\n\t\texcept:\n\t\t\tpass\n\telse:\n\t\tprint \"Content-type:text/html\\r\\n\\r\\n\"\n\t\tprint '{\"status\":\"ERROR\",\"data\":\"No files to download\"}'\n\t\ndef uploadAndSignCSR(ca,CSR,csrName,validity):\n\t#print \"Content-type:text/html\\r\\n\\r\\n\"\n\t#print\n\tcaDir = '%s/%s' %(parentFolder,ca)\n\t#print caDir\n\tcsrFileName = storeCSR(CSR,csrName,caDir)\n\t#print csrFileName\n\ttargetFileName = '%s-signed.cert' 
%csrName\n\ttry:\n\t\ttargetCertPath = signCSR(csrFileName,caDir,targetFileName,validity)\n\t\tinitiateFileDownload(targetCertPath,targetFileName,True) \n\texcept Exception as e:\n\t\tprint \"Content-type:text/html\\r\\n\\r\\n\"\n\t\tprint 'Unexpected error occured. %s' %str(e)\n\ndef main():\n\tform = cgi.FieldStorage() \n\tact = form.getvalue('action') \t \n\tif(act == 'loadCA'):\n\t\tresp = loadCA() \n\t\tprint 'Content-type:text/html\\r\\n' \n\t\tprint resp\n\telif act == 'addCA':\n\t\tcn = form.getvalue('cn') \n\t\t\n\t\temail = form.getvalue('email') \n\t\tcountry = form.getvalue('country') \n\t\torg = form.getvalue('org') \n\t\tstate = form.getvalue('state') \n\t\tlocality = form.getvalue('locality') \n\t\torgUnit = form.getvalue('orgUnit') \n\t\tassert cn and email and org and country and state and locality and orgUnit,'Invalid Params'\n\t\tassert not \" \" in cn,'Commnon name should not have space'\n\t\t\n\t\tresp = addCA(cn,email,org,country,state,locality,orgUnit)\n\n\t\tprint 'Content-type:text/html\\r\\n'\n\t\tprint json.dumps(resp) \n\telif act=='downloadPrivate' or act=='downloadCert':\n\t\tca = form.getvalue('ca')\n\t\tdownload(ca,act)\t\n\telif act=='csr':\t\n\t\tuploadedFile = form['csrFile'] \n\t\tfileName = uploadedFile.filename\n\t\tcn = form.getvalue('ca') \n\t\tvalidity = form.getvalue('validity') \n\t\tassert fileName, 'Invalid file name'\n\t\tif not \tvalidity:\n\t\t\tvalidity = \"375\"\n\t\tvalidity = str(validity)\n\t\tassert validity.isdigit(), 'Invalid validity'\n\t\t\n\t\tuploadAndSignCSR(cn,uploadedFile,fileName,validity)\n\telif act =='userCert': \n\t\tcn = form.getvalue('ca') \n\t\tun = form.getvalue('username') \n\t\tassert un, 'Invalid username'\t \n\t\tresp = generateUserCert(cn,un)\n\t\tif resp :\n\t\t\tprint 'Content-type:text/html\\r\\n'\n\t\t\tprint json.dumps(resp)\n\telif act =='deviceCert': \n\t\tca = form.getvalue('ca') \n\t\tdeviceName = form.getvalue('deviceName') \n\t\tassert deviceName, 'Invalid device name'\t \n\t\tresp = generateDeviceCert(ca,deviceName)\n\t\tif resp :\n\t\t\tprint 'Content-type:text/html\\r\\n'\n\t\t\tprint json.dumps(resp)\n\telse:\n\t\tprint 'Content-type:text/html\\r\\n'\n\t\tprint '{\"status\":\"ERROR\",\"data\":\"Invalid action\"}' \n\nif __name__ == \"__main__\":\n try:\n main()\n except Exception as e :\n print 'Content-type:text/html\\r\\n'\n print json.dumps({'status':'ERROR','data':'Exception %s' %str(e)})\n \n\n\n" }, { "alpha_fraction": 0.6806883215904236, "alphanum_fraction": 0.6940726637840271, "avg_line_length": 23.85714340209961, "blob_id": "bfcf8dc0c0c23e1c4f278479092a9153d4474543", "content_id": "a1d9ebcfeb36116697da2c84f7822d10afb0afa0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 523, "license_type": "no_license", "max_line_length": 122, "num_lines": 21, "path": "/cgi/cgi/v2/Lib/vmactions/remoteconsole.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n'''\nAuthor : Pradeep CH\nVersion : Development\nSince : PAT V2\nDate : 10-Mar-2017\n'''\n\nimport sys\n\nsys.path.append('../Lib')\n\nfrom sshclient import SSHClient\n\ndef triggerRemoteConsole(clientIp,serverip,un,pwd,moid):\n\ttry:\n\t\tcmd = 'vmrc -H %s -U %s -P %s -M %s' %(serverip,un,pwd,moid)\n\t\tcl = SSHClient(clientIp)\n\t\tcl.executeCommand(cmd)\n\t\treturn 'Remote console successfull'\n\texcept Exception as e:\n\t\tassert False, 'Could not connect to remote machine. 
Make sure the plugin is running in your machine, Cause : %s' %str(e)\n" }, { "alpha_fraction": 0.719298243522644, "alphanum_fraction": 0.719298243522644, "avg_line_length": 21.399999618530273, "blob_id": "ede6da29f2468850f206e74221469576ddcf6924", "content_id": "4f3988f861f4e98068f8dd97b65efef1e6dd0d85", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 114, "license_type": "no_license", "max_line_length": 39, "num_lines": 5, "path": "/cgi/cgi/v2/Lib/beans/__init__.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python \n__author__ ='Pradeep'\n\nfrom beans import EnvInfo\nfrom beans import Hypervisor,DHCPServer\n \n" }, { "alpha_fraction": 0.5404670238494873, "alphanum_fraction": 0.5490818619728088, "avg_line_length": 29.797203063964844, "blob_id": "e2bf7f4c571ec4308d51a8854a827dd35a65dea4", "content_id": "e3ef6f676f87e114e9d9b2d96511a177ed0453ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4411, "license_type": "no_license", "max_line_length": 90, "num_lines": 143, "path": "/cgi/cgi/v2/Lib/dhcpconfig/DHCPDConfigParser.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "__author__='Pradeep CH'\n__version__ ='1.0.0'\n__date__= '10- Mar- 2017'\n\nimport sys\nsys.path.append('/home/local/PAYODA/pradeep.k/Python/DHCP Hosts/')\nimport re\nfrom dhcpconfig import Config,Pool,Host,Subnet\n\nclass DHCPDConfigParser(object):\n\n @staticmethod\n def parse(data):\n conf = Config()\n data =DHCPDConfigParser.preprocess(data) \n blocks = DHCPDConfigParser.getAsBlocks(data)\n subnetBlocks = blocks['subnet'] \n \n for subnetBlock in subnetBlocks: \n subnet = DHCPDConfigParser.extractSubnet(subnetBlock)\n subnetSpecBlocks = DHCPDConfigParser.getAsBlocks(subnetBlock)\n poolBlocks = subnetSpecBlocks['pool']\n sHostBlocks = subnetSpecBlocks['host'] \n for poolBlock in poolBlocks:\n pool = DHCPDConfigParser.extractPool(poolBlock)\n poolSpecBlocks = DHCPDConfigParser.getAsBlocks(poolBlock)\n pHostBlocks = poolSpecBlocks['host'] \n for hostBlock in pHostBlocks:\n host = DHCPDConfigParser.extractHost(hostBlock)\n pool.hosts.append(host)\n subnet.pools.append(pool)\n \n for hostBlock in sHostBlocks:\n host = DHCPDConfigParser.extractHost(hostBlock)\n subnet.hosts.append(host)\n\n conf.subnets.append(subnet)\n return conf\n\n @staticmethod\n def extractHost(hostBlock): \n host = Host()\n host.name = re.search('host +([-\\w.]*)?[\\s{]*',hostBlock).group(1)\n host.mac = re.search('hardware +ethernet +(\\S*)? *;',hostBlock).group(1)\n host.ip = re.search('fixed-address +(\\S*)? *;',hostBlock).group(1) \n return host\n\n @staticmethod\n def extractPool(poolBlock): \n pool = Pool() \n m = re.search('range (\\S+)? +(\\S+)? *;',poolBlock) \n pool.rangeStart = m.group(1)\n pool.rangeEnd = m.group(2)\n pool.bootfileName = re.search('option +bootfile-name +\"(\\S*)?\"',poolBlock).group(1)\n pool.subnetMask = re.search('option +subnet-mask +(\\S*)? *;',poolBlock).group(1)\n pool.routers = re.search('option +routers +(\\S*)? 
*;',poolBlock).group(1)\n if pool.bootfileName :\n pool.status = 'Assigned'\n return pool\n\n @staticmethod\n def extractSubnet(subnetBlock):\n subnet =Subnet()\n line = subnetBlock[:subnetBlock.index('\\n')]\n items = re.split('\\s+',line)\n subnet.subnet = items[1]\n subnet.netmask = items[3]\n return subnet\n\n\n @staticmethod\n def getAsBlocks(data):\n subnetBlocks =[]\n poolBlocks = []\n hostBlocks =[]\n ci = 1\n pi = 1\n while ci!=-1 and ci<len(data): \n line = data[pi:ci] \n ri = 0\n if line.startswith('subnet'): \n [ri,subnetblock] = DHCPDConfigParser.extractBlock(data[pi:])\n subnetBlocks .append(subnetblock)\n elif line.startswith('pool'):\n [ri,poolblock] = DHCPDConfigParser.extractBlock(data[pi:])\n poolBlocks .append(poolblock)\n elif line.startswith('host'):\n [ri,hostBlock] = DHCPDConfigParser.extractBlock(data[pi:])\n hostBlocks .append(hostBlock)\n try:\n if ri==0:\n pi = ci\n else:\n pi +=ri+1\n ci = data.index('\\n',pi+1)+1\n except Exception as e: \n ci =-1\n \n return {'subnet':subnetBlocks,'pool':poolBlocks,'host':hostBlocks}\n \n @staticmethod\n def extractBlock(data): \n si = data.index('{')\n ci = si\n if si==-1:\n return ''\n lc = 1\n rc = 0\n\n while lc > rc and ci < len(data):\n ci +=1\n c = data[ci]\n if c == '{':\n lc +=1\n elif c == '}':\n rc +=1\n return [ci+1,data[:ci+1]]\n \n @staticmethod\n def preprocess(data):\n processedData =''\n cIndex= 0\n commentStarted = False\n newLine = False\n\n while cIndex< len(data):\n c = data[cIndex]\n cIndex +=1\n if c=='#':\n commentStarted = True\n continue\n if c == '\\n':\n commentStarted = False\n newLine = True\n continue \n if commentStarted or (newLine and (c==' ' or c=='\\t')): \n continue\n\n if newLine:\n processedData +='\\n'\n newLine = False\n processedData +=c \n return processedData\n \n" }, { "alpha_fraction": 0.5619834661483765, "alphanum_fraction": 0.6363636255264282, "avg_line_length": 18.83333396911621, "blob_id": "f72b82c360aabeaec3d143aace54099ba8aeee16", "content_id": "f83d54f39b16ed3dc28b3e8db832d3edcd44522a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 121, "license_type": "no_license", "max_line_length": 49, "num_lines": 6, "path": "/cgi/cgi/v2/Lib/vmdkstorage/__init__.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n__author__='Pradeep CH'\n__version__ ='1.0.0'\n__date__= '29- Mar- 2017'\n\n\nfrom vmdkManger import getVMDKInfo, getVMBaseInfo\n\n" }, { "alpha_fraction": 0.567307710647583, "alphanum_fraction": 0.5831043720245361, "avg_line_length": 24.508771896362305, "blob_id": "76c4475ac21fd2e8eb8a49e6a0f1788474b5fab9", "content_id": "ed3870fae9f0d862d83aa8064b085cc300574c13", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1456, "license_type": "permissive", "max_line_length": 65, "num_lines": 57, "path": "/v2/js/command/pushimage.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n/*\nAuthor : Pradeep CH\nDate : 22-Feb-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n\n$(document).on('click', '#submit', function (event) { \n loadResp('Pushing image. 
Please wait...')\n event.preventDefault();\n var files= $('#uploadImg')[0].files\n if(files.length==0){\n loadResp('No images selected')\n return\n }\n var file = files[0]\n var data = new FormData();\n data.append('imgFile', file)\n data.append('ips', $('#ips').val())\n data.append('uname', $('#username').val())\n data.append('password', $('#password').val()) \n data.append('cgipath', cgiPath) \n target = cgiPath +'command/imagepush.py'\n $.triggerPOSTCallWithoutContentType(target,data,loadPushResp);\n});\n$(document).on('change','#uploadImg',function(){\n var files= $('#uploadImg')[0].files\n if(files.length==0){\n $('#fileLabel').val('Select an image file');\n return\n }\n $('#fileLabel').val(files[0].name);\n});\nfunction loadPushResp(resp){\n if(resp===undefined){\n loadResp('Invalid resp')\n return\n }\n\n var obj = jQuery.parseJSON( resp );\n if(obj['status']=='error'){\n loadResp(obj['data'])\n }else{ \n content = \"\"\n keys = Object.keys(obj['data'])\n $(keys).each(function() { \n currentData = obj['data'][this] \n\t content += '<h4>'+this+'</h4>'\n\t content += '<pre>'+currentData+'</pre>'\n });\n loadResp(content)\n }\n}\nfunction loadResp(resp){\n\t $('#consoleContent').html(resp).fadeIn(600); \n}\n\n" }, { "alpha_fraction": 0.6060320734977722, "alphanum_fraction": 0.6145145893096924, "avg_line_length": 23.390804290771484, "blob_id": "ecb509aa7526a4446bca4e4454f74ce64e67feda", "content_id": "c7d1c935f333b1380c387755cd441401fae8cfa5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2122, "license_type": "permissive", "max_line_length": 96, "num_lines": 87, "path": "/v2/js/scripter/executor.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 19-Jun-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\ninit();\n\nfunction init(){\n\tsid = getParamValue(document.location.href,'id') \n console.log(sid)\n console.log('data')\n\t//sname = getParamValue(document.location.href,'name') \n\tif(sid===undefined){\n\t\tloadResp('Invalid request..') \n\t\treturn\n\t}\n\t//$('#scriptName').val(sname)\n\t$('#scriptId').val(sid)\n \tloadResp('Loading script details...')\n loadScriptInfo(sid);\n}\n\nfunction loadScriptInfo(sid){\n\ttarget =cgiPath + 'scripter/scriptmanager.py'\n\tdata = {'action':'getScriptInfo','id':$('#scriptId').val()}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadScriptInfoResp)\n}\n\nfunction loadScriptInfoResp(r){\n if(r===undefined){\n loadResp('Invalid resp')\n return\n }\n var obj = jQuery.parseJSON( r );\n if(obj['status']==='success'){\n $('#scriptName').val(obj['data'].fileName)\n $('#arg').attr('placeholder',obj['data'].param)\n loadResp('Script info loaded successfully')\n } else{\n loadResp(obj['data'])\n }\n}\n//Actions \n$(document).on('click','#reset',function(){\n\t$('#ip').val('')\n\t$('#arg').val('')\n\t$('#username').val('')\n\t$('#password').val('')\n});\n\n$(document).on('click','#submit',function(){\n\tevent.preventDefault(); \n\tip = $('#ip').val()\n\tusername = $('#username').val()\n\tpassword = $('#password').val()\n\targ = $('#arg').val()\n\tid = $('#scriptId').val()\n\n\t$('#savekvmconsole').html('Saving server info. 
Please wait').fadeIn(600); \n\ttarget =cgiPath + 'scripter/scriptmanager.py'\n\tdata = {'action':'execute','ip':ip,'username':username,'password':password,'id':id,'arg':arg}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadActResp)\n\t$('#reset').click()\n});\n\nfunction loadActResp(resp){\n if(resp===undefined){\n loadResp('Invalid resp')\n return\n }\n var obj = jQuery.parseJSON( resp );\n console.log(obj['status']==='success')\n if(obj['status']==='success'){\n $('#reset').click();\n loadResp('Execution completed. Response : <pre>'+obj['data']+'</pre>') \n } else{\n \tloadResp(obj['data']) \n }\n}\n\nfunction loadResp(resp){\n\t$('#consoleContent').html(resp).fadeIn(600);\n}\n" }, { "alpha_fraction": 0.6446099877357483, "alphanum_fraction": 0.6472392678260803, "avg_line_length": 26.731706619262695, "blob_id": "f3b7cb9ead4e684097683d9d0fdebee66544da7d", "content_id": "bad0b41d8a1eda12f63e41525a4df5d771af8b83", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2282, "license_type": "no_license", "max_line_length": 146, "num_lines": 82, "path": "/cgi/cgi/v2/DeviceManager/DHCP/search.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\n__author__ ='Pradeep CH'\n\nimport sys\nimport os\nfrom os.path import expanduser \nimport json\n\n# Import modules for CGI handling \nimport cgi, cgitb \n\n\nsys.path.append('../../Lib')\nfrom sqldb import db \n\n\nsourcedirectory = expanduser(\"~\") + \"/pat/\"\nsourceFolder = sourcedirectory+'dhcpconfig/' \n \ndef readFile(ip):\n cfileName = sourceFolder +ip+'.conf'\n assert os.path.exists(cfileName),'Configuration file not found. Synch config for dhcp '+ str(ip) \n with open(cfileName,'r') as f:\n return f.read()\n\ndef performSearch(data,keyword):\n\tif keyword not in data:\n\t\treturn\n\tresp =''\n\tlineNum =0\n\tlines= data.split('\\n')\n\tfor line in lines:\n\t\tlineNum +=1\n\t\tif keyword in line:\n\t\t\tformatedLine = formatLine(line,keyword)\n\t\t\tresp += '<span class=\"lineNo\">Line %d :</span> %s</br>' %(lineNum,formatedLine)\n\treturn resp\n\ndef formatLine(line,searchKey):\n #return line.replace(searchKey,'<span id=\"highligh\" style=\"font-weight:bold\">'+ searchKey+'</span>')\n return line.replace(searchKey,'<span class=\"highligh\">'+ searchKey+'</span>')\n\n\ndef search(keyword): \n servers = db.getData('DHCPServers',['id','ip'])\n assert servers,'No DHCP info found'\n wholeData ={}\n ipUid ={}\n bodyContent =''\n for server in servers:\n data = readFile(server['ip'])\n wholeData[server['ip']]= data\n ipUid[server['ip']]= server['id']\n assert len(wholeData)>0,'There is no configuration to perform search'\n matchFound = False\n for ip,data in wholeData.items(): \n resp = performSearch(data,keyword)\n if resp:\n bodyContent +='<h4> Match found in Server <a target=\"blank\" href=\"/pat/v2/dhcp/manage.htm?ip=%s&uid=%s\"> %s</a></h4>' %(ip,ipUid[ip],ip)\n bodyContent += resp\n\t matchFound = True\n if not matchFound:\n bodyContent = '</br>Match not found :('\n return {'status':'success','data':bodyContent}\n\ndef main():\n # Create instance of FieldStorage \n form = cgi.FieldStorage() \n\n keyword = form.getvalue('keyword')\n assert keyword and keyword!='','Keyword cannot be empty' \n resp = search(keyword)\n print json.dumps(resp)\n\nif __name__ == \"__main__\":\n print 'Content-type:text/html\\r\\n'\n try:\n main()\n except Exception as e :\n print json.dumps({'status':'error','data':'%s' %str(e)})\n\n\n\n\n\t\n\n\n" }, { 
"alpha_fraction": 0.684355616569519, "alphanum_fraction": 0.6864231824874878, "avg_line_length": 19.72857093811035, "blob_id": "ca82efa76ab8a57964105f62e231fd1ca4d82ff1", "content_id": "633dfcdf6967e8e9c60bbb5bdb9fed03d0ea805c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1451, "license_type": "no_license", "max_line_length": 70, "num_lines": 70, "path": "/cgi/cgi/v2/deviceaction/deviceReset.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n# Import modules for CGI handling \nimport cgi, cgitb \n\nimport time\nimport re\nfrom Reload import ReloadDevices\nfrom htmlutil import html \nimport sys\nimport os\nsys.path.append('../Lib')\nfrom IPAddressManager import parser\n#\n#Author Pradeep CH\n#\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n\n\ndef restartSingleDevice(ip,un,pwd):\n\tdeviceLoader = ReloadDevices()\n\ttry: \n\t\tif un and pwd:\n\t\t\treturn deviceLoader.realoadDevice(ip,un,pwd)\n\t\telse:\n\t\t\treturn deviceLoader.realoadDevice(ip) \n\texcept Exception as e:\n\t\treturn str(e)\n\n\t\t\ndef log(msg):\n\tprint msg+'<br/>'\n\ndef restartMultiple(ips,un,pwd):\n\tresp =''\n\tfor ip in ips:\n\t\tip = ip.strip()\n\t\tif ip == '':\n\t\t\tcontinue;\n\t\t#log('IP %s is trying to reset ' %ip)\n\t\tstat = restartSingleDevice(ip,un,pwd);\n\t\t#stat ='Success'\n\t\t#log('Restring IP %s completed with status %s' %(ip,stat))\n\t\tresp +='<br />' + 'IP %s : %s' %(ip,str(stat))\n\treturn resp\n\n# Create instance of FieldStorage \nform = cgi.FieldStorage() \n\n\nbodyContent = '' #html.getBackButton('/pat/DeviceManager/manager.htm')\n\n# Get mode from fields\n#mode= form.getvalue('mode')\n\nun = form.getvalue('username')\npwd = form.getvalue('pwd')\nips = form.getvalue('ips')\nif not ips:\n\tbodyContent +='No IPs provided'\nelse:\n\ttry: \n\t\tmips = parser.parse(ips) \n\t\tbodyContent +=restartMultiple(mips,un,pwd)\t\t\n\texcept Exception as e:\n\t\tbodyContent +='Excption : %s' %str(e)\n#print html\nhtml.printHeader('Device Reset')\nhtml.printBodyContent(bodyContent)\n" }, { "alpha_fraction": 0.6974318027496338, "alphanum_fraction": 0.6974318027496338, "avg_line_length": 20.11864471435547, "blob_id": "77faf85e2633ae21e2692b23bf93a2900d9d5258", "content_id": "9f3dd43f746edaccd14244ad7ec78334fa7cba2a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1246, "license_type": "no_license", "max_line_length": 75, "num_lines": 59, "path": "/cgi/cgi/v2/common/action.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\n__author__ ='Pradeep CH'\n\nimport sys\nimport os \nimport json\nimport cgi\n \ndef download(fileName,data):\t\n\ttry:\n\t\tdownloadFileName = fileName \n\t\tprint 'Content-Disposition: attachment; filename=\"%s\"' % downloadFileName\n\t\t#print \"Content-Length: \" + str(os.stat(fullPath).st_size)\n\t\tprint # empty line between headers and body\n\t\tprint data\n\n\texcept Exception as e:\n\t\tprint \"Content-type:text/html\\r\\n\\r\\n\"\n\t\tprint 'Unexpected error occured. 
%s' %str(e)\n\n\t\n#\ndhcpServerResourceFile = '../../../DHCPServers/servers'\n\ndef loadAllServers():\n\tdata =\"\"\n\twith open(dhcpServerResourceFile,'r') as f:\n\t\tdata = f.read()\n\tservers = {}\n\tif data:\n\t\tservers = json.loads(data)\n\treturn servers\n\ndef getServerInfo(ip):\n\tservers = loadAllServers()\n\tserverInfo ='No data found'\n\tfor server in servers:\n\t\tif server['ip']==ip:\n\t\t\tserverInfo = json.dumps(server)\n\tprint \"Content-type:text/html\\r\\n\\r\\n\"\n\tprint serverInfo\n\nform = cgi.FieldStorage() \n\naction = form.getvalue('action')\n\nif action == 'download':\n\tdata = form.getvalue('data')\n\tfileName = form.getvalue('fileName')\n\tdownload(fileName,data);\nelif action=='dhcpserverInfo':\n\tip = form.getvalue('ip')\n\tgetServerInfo(ip);\n\nelse:\n\tprint \"Content-type:text/html\\r\\n\\r\\n\"\n\tprint 'Invalid action'\n" }, { "alpha_fraction": 0.6272134184837341, "alphanum_fraction": 0.6439887881278992, "avg_line_length": 28.77777862548828, "blob_id": "796d4e0b4c25bf24948561a0bc23057b1de8c478", "content_id": "0e998d979d56b7190d56d7966acf6380ea496ece", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1073, "license_type": "permissive", "max_line_length": 116, "num_lines": 36, "path": "/v2/js/DeviceManager/manager.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n/*\nAuthor : Pradeep CH\nDate : 10-Feb-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\ncheckParam();\n\nfunction checkParam(){\n\tfromIP = getParamValue(document.location.href,'fromIP') \n\ttoIP = getParamValue(document.location.href,'toIP') \n\tif(fromIP===undefined || toIP===undefined || toIP==\"\" || fromIP==\"\"){\n\t\treturn\n\t} \n\t//if it's va;lid perform click\n\t$('#resetDeviceIPs').val(fromIP+'-'+toIP) \n\t//$('resetDevice').trigger( \"click\" );\n}\n\n$(document).on('click', '#resetDevice', function (event) { \n\t$('#deviceResetconsoleResult').html('Reset in progress. 
Please wait..').fadeIn(800)\n\ttarget =cgiPath + 'deviceaction/deviceReset.py'\n\tmethod ='POST'\n\tdata = {'username':$('#resetDeviceUsername').val(),'pwd':$('#resetDevicePassword').val(),'ips':$('#resetDeviceIPs').val()} \n\t$.triggerCall(target,method,data,loadResp)\t\t\t \n});\n\n$(document).on('click', '#resetDeviceReset', function (event) { \n\t$('#resetDeviceUsername').val(\"\")\n\t$('#resetDevicePassword').val(\"\")\n\t$('#resetDeviceIPs').val(\"\")\n});\n\nfunction loadResp(resp){\n\t$('#deviceResetconsoleResult').html(resp).fadeIn(200);\n}\n" }, { "alpha_fraction": 0.5384615659713745, "alphanum_fraction": 0.5427475571632385, "avg_line_length": 25.224851608276367, "blob_id": "f2010574b782b16de65085045413c69c71a9b8d7", "content_id": "b881a082450730a86203fa864ca02a6963b883cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4433, "license_type": "no_license", "max_line_length": 226, "num_lines": 169, "path": "/cgi/cgi/v2/Lib/dhcpconfig/beans.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "__author__='Pradeep CH'\n__version__ ='1.0.0'\n__date__= '10- Mar- 2017'\n\nimport datetime\n\nclass Config(object):\n def __init__(self):\n self.subnets = []\n def getAsJson(self):\n subnetsJson = []\n for subnet in self.subnets:\n subnetsJson.append(subnet.getAsJson());\n return {'subnets':subnetsJson}\n\n def getAsConfig(self):\n subnetConfig = ''\n for subnet in self.subnets:\n subnetConfig += subnet.getAsConfig()\n\n config = '''\n#-----------------------------------\t\t\n#This config is generated by PAT 2.0 \n#Avoid editing this file manually\n#----------------------------------\n\t\t\ndefault-lease-time 600;\nmax-lease-time 7200;\nauthoritative;\nlog-facility local7;\nping-check true;\n\n#----------------- Subnets---------------\t\t\n\t%s \n#---------------End of Subnets ---------''' %subnetConfig\n\n return config\n\n#subnet\nclass Subnet(object):\n def __init__(self):\n self.subnet = \"\"\n self.netmask =\"\"\n self.hosts =[]\n self.pools = []\n\n def getAsJson(self):\n poolObject = []\n hostObject =[]\n for pool in self.pools:\n poolObject.append(pool.getAsJson())\n for host in self.hosts:\n hostObject.append(host.getAsJson())\n return {'subnet':self.subnet,'netmask':self.netmask,'hosts':hostObject,'pools':poolObject}\n\n def loadData(self,a):\n self.subnet=a['subnet']\n self.netmask = a['netmask']\n self.hosts = a['hosts']\n self.pools = a['pools']\n\n def getAsConfig(self):\n poolConfig = ''\n for pool in self.pools:\n poolConfig += pool.getAsConfig()\n\n hostConfig =''\n for host in self.hosts:\n hostConfig = host.getAsConfig()\n\n config = '''\nsubnet %s netmask %s {\n#------------------ Summary --------------------\n#Pools : %d\n#Hosts : %d\n#---------------- End Summary ------------------\n\n#----------- Hosts will be listed here----------\n%s\n#----------------- End of Hosts ----------------\n#---------- Pools will be listed here ----------\n%s\n#---------- End of Pools ----------\n\t\t\n}''' %(self.subnet,self.netmask,len(self.pools),len(self.hosts),hostConfig,poolConfig)\n return config\n\t\t\n#POOL\n\nclass Pool(object):\n def __init__(self):\n self.assignedDate = \"no details\"\n self.rangeStart = \"\"\n self.rangeEnd = \"\"\n self.bootfileName = \"\"\n self.subnetMask = \"\"\n self.routers = \"\"\n self.status = \"Unassigned\"\n self.hosts =[]\n\n def makeAssigned(self):\n self.assignedDate =str(datetime.datetime.now().date())\n\n def setAssigned(self,date):\n self.assignedDate =str(date)\n\n 
def loadData(self,a):\n self.assignedDate = a[\"assignedDate\"]\n self.rangeStart = a[ \"rangeStart\"]\n self.rangeEnd = a[ \"rangeEnd\"]\n self.bootfileName = a[\"bootfileName\"]\n self.subnetMask = a[\"subnetMask\"]\n self.routers = a[\"routers\"]\n self.status = a[\"status\"]\n self.hosts = a['hosts']\n\n\n def getAsJson(self):\n hostObject = []\n for host in self.hosts:\n hostObject.append(host.getAsJson())\n return {'rangeStart':self.rangeStart,'rangeEnd':self.rangeEnd,'bootfileName':self.bootfileName,'status':self.status,'subnetMask':self.subnetMask,'routers':self.routers,'hosts':hostObject,\"assignedDate\":self.assignedDate}\n def getAsConfig(self):\n config =''\n hostConfig = ''\n for host in self.hosts:\n hostConfig += host.getAsConfig()\n config = '''\n\tpool {\n\t\trange %s %s;\n \toption bootfile-name \"%s\";\n \toption subnet-mask %s;\n \toption routers %s;\n\t\t\t\n\t\t#------------- Summary ---------------------\n\t\t#Hosts : %d\n\t\t#------------- End of Summary --------------\n\n\t\t#-------------- Hosts-----------------------\n\t\t%s\n\t\t#-------------- End of hosts----------------\n\t\t\t\n\t}''' %(self.rangeStart,self.rangeEnd,self.bootfileName,self.subnetMask,self.routers, len(self.hosts),hostConfig)\n return config\n\n#Host\nclass Host(object):\n def __init__(self):\n self.name =''\n self.mac =''\n self.ip =''\n self.bootfileName = \"\"\n def getAsJson(self):\n return {'mac':self.mac,'ip':self.ip,'name':self.name}\n\n\n def loadData(self,a):\n self.name =a['name']\n self.mac =a['mac']\n self.ip =a['ip']\n self.bootfileName = a['bootfileName']\n\n def getAsConfig(self): \n config = '''\n\t\thost %s {\n \t\t\thardware ethernet %s;\n\t\t\tfixed-address %s;\n\t\t}''' %(self.name,self.mac,self.ip)\n return config\n\n" }, { "alpha_fraction": 0.6901408433914185, "alphanum_fraction": 0.6933911442756653, "avg_line_length": 19.065217971801758, "blob_id": "18a88d24467be314af93d8d0e80ef7f3c6fdac4a", "content_id": "ef4d4c33651de4a1b43e0b8d18b445dbdf0a1e87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 923, "license_type": "no_license", "max_line_length": 74, "num_lines": 46, "path": "/cgi/cgi/v2/Lib/commandexecutor.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n \nimport sys\n\nimport cgi, cgitb \nimport pyeapi\nimport json\n\n##SSH\nimport paramiko\n\n\n__author__ = 'Pradeep CH'\n\n\ndef executeCommand(ip,cmds,un='cvpuser',pwd='root'): \n\n\tip = ip.strip()\n\tun = un.strip()\n\tpwd = pwd.strip()\n\n\tconnection = pyeapi.connect(host=ip,username=un,password=pwd,timeout=10) \n\tresp = connection.execute(cmds)\n\tresp = json.dumps(resp, indent=4) \n\treturn resp\n\ndef validateAuthentication(ip,un='cvpuser',pwd='root'): \n\texecuteCommand(ip,'show hostname',un,pwd) \n\treturn True\n\ndef executeCommandSSH(ip,cmds,un='cvpuser',pwd='root'):\n\n\tip = ip.strip()\n\tun = un.strip()\n\tpwd = pwd.strip()\n\t\n\tif type(cmds) is list:\n\t\tcmds = '\\n'.join(cmds)\n\n\tssh = paramiko.SSHClient()\n\tssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n\tssh.connect(ip, username=un,password=pwd)\n\tstdin, stdout, stderr = ssh.exec_command(cmds) \n\tdata = stdout.readlines()\n\tssh.close()\n\treturn data\n" }, { "alpha_fraction": 0.49618321657180786, "alphanum_fraction": 0.5410305261611938, "avg_line_length": 21.7608699798584, "blob_id": "82104f6f179f69947f45f8b27e671dc3a030b978", "content_id": "1d2b6d8a82f96ec5c34df7f29c90c04b83ed878c", "detected_licenses": [], "is_generated": 
false, "is_vendor": false, "language": "Python", "length_bytes": 1048, "license_type": "no_license", "max_line_length": 69, "num_lines": 46, "path": "/cgi/cgi/v2/Lib/IPAddressManager/IPManger.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python \nimport socket \nimport re \n__author__ ='Pradeep'\n\nclass IPAdressParcer(object):\n\tdef parse(self,data):\n\t\tips = []\n\t\tlines = re.split('[\\n\\s;,]+',data) \n\t\tfor line in lines:\n\t\t\tif '-' in line:\n\t\t\t\ttemp = line.split('-')\n\t\t\t\tassert len(temp)==2, 'Invalid range : %s' %line\n\t\t\t\tips.extend(self.getRange(temp[0],temp[1]))\n\t\t\telse:\n\t\t\t\tips.append(line)\n\t\treturn ips\n\n\tdef getRange(self,ipstart,ipend):\n\t\tips =[]\n\t\ttry:\n\t\t\ts = [int(numeric_string) for numeric_string in ipstart.split('.')]\n\t\t\te = [int(numeric_string) for numeric_string in ipend.split('.')]\n\t\texcept Exception as e:\n\t\t\tassert False, 'Invalid IP range'\n\t\n\t\tassert len(s)==4 and len(e)==4, 'Invalid IP Address.'\n \n\t\twhile not (e[0]==s[0] and e[1]==s[1] and e[2]==s[2] and e[3]<s[3]):\n\t\t\n\t\t\tip = '%d.%d.%d.%d'%(s[0],s[1],s[2],s[3])\n\t\t\tips.append(ip)\n\t\t\t#move to the next\n\t\t\ts[3] +=1\t\t\n\t\t\tif s[3]==255:\n\t\t\t\ts[2] +=1\n\t\t\t\ts[3] =1\n\t\t\tif s[2]==255:\n\t\t\t\ts[1] +=1\n\t\t\t\ts[2] =1\n\t\t\tif s[1]==255:\n\t\t\t\ts[0] +=1\n\t\t\t\ts[1] =1\n\t\t\tif s[0] >255 :\n\t\t\t\tbreak;\n\t\treturn ips \n" }, { "alpha_fraction": 0.52173912525177, "alphanum_fraction": 0.5298452377319336, "avg_line_length": 23.672727584838867, "blob_id": "5a4be87d4adb06b1f89eb932ab14b42a90568d96", "content_id": "bb529d6f8866cbe01eddf85f442ea3eea56de95a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": true, "language": "JavaScript", "length_bytes": 1357, "license_type": "permissive", "max_line_length": 82, "num_lines": 55, "path": "/v2/js/jqueryext.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "//Jquery extention file\n/*\n\nAuthor : Pradeep CH\nVersion : 1.0.0\nSince : 1.0.0\nDate : Sat Feb 4 2017\n\nThis file extends jquery to add two more feture to trigger post call and a API hit\n*/\n\n$.extend({\n \t\tredirectPost: function(location, args)\n \t\t{\n \t\tvar form = $('<form></form>');\n \t \t\tform.attr(\"method\", \"post\");\n \t\tform.attr(\"action\", location);\n \t\tform.attr(\"target\", 'default');\n\n \t\t$.each( args, function( key, value ) {\n \t\t\tvar field = $('<input></input>');\n\n \t\t\tfield.attr(\"type\", \"hidden\");\n \t\t\tfield.attr(\"name\", key);\n \t\t\tfield.attr(\"value\", value);\n\n \t\t\tform.append(field);\n \t\t});\n \t\t\t$(form).appendTo('body').submit();\n \t\t},\n\n\t\ttriggerCall : function(target,method,bodyContent,targetMethod){\n\t\t\t$.ajax({\n \t\t\t\turl: target,\n \t\t\t\ttype: method,\n \t\t\t\tdata: bodyContent,\n \t\t\t\tsuccess: function(response, status, xhr){ \n\t\t\t\t\ttargetMethod(response);\n\t\t\t\t}\n\t\t\t}); \n\t\t},\n\t\ttriggerPOSTCallWithoutContentType : function(target,bodyContent,targetMethod){\n\t\t\t$.ajax({\n\t\t\t\turl: target,\n \t\t\t\tdata: bodyContent,\n \t\t\t\tcache: false,\n \t\t\t\tcontentType: false,\n \t\t\t\tprocessData: false,\n \t\t\t\ttype: 'POST',\n \t\t\t\tsuccess: function(response, status, xhr){ \n\t\t\t\t\ttargetMethod(response);\n\t\t\t\t}\n\t\t\t}); \n\t\t}\n});\n" }, { "alpha_fraction": 0.6279069781303406, "alphanum_fraction": 0.6279069781303406, "avg_line_length": 16, "blob_id": "69b859ee683ddb62cb83cdf4b96f26c9289e1f6d", "content_id": 
"03c92d9d3efcb84b7c3ba3de5863b19e75ca8e7a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 86, "license_type": "no_license", "max_line_length": 25, "num_lines": 5, "path": "/cgi/cgi/v2/Lib/sqldb/__init__.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python \n__author__ ='Pradeep'\n\nfrom SQLLite import SQLDB\ndb = SQLDB() \n" }, { "alpha_fraction": 0.6337652206420898, "alphanum_fraction": 0.6398627758026123, "avg_line_length": 24.19230842590332, "blob_id": "4562c101f9de5c0375d68ea49516f7a2d13810db", "content_id": "56c7903fcd84b8dffe1ae3b289912176ef7bc142", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2624, "license_type": "no_license", "max_line_length": 119, "num_lines": 104, "path": "/cgi/cgi/v2/deviceaction/authenticationValidator.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\nimport sys\n\n#Add path \nsys.path.append('../Lib') \nfrom IPAddressManager import parser\nfrom commandexecutor import validateAuthentication\n\nfrom htmlutil import html \n\n# Import modules for CGI handling \nimport cgi, cgitb \nimport re\nimport json\n\n\t\n# Create instance of FieldStorage \nform = cgi.FieldStorage() \n\n#header\n\n\n# Get mode from fields \n \nips = form.getvalue('ips') \nun = form.getvalue('username') \npwd = form.getvalue('pwd') \n\nbodyContent ='' \n\nsuccessIPS =[]\nfailedIPS = []\nfailedIPSWithCause =[]\ntotal =0\n\nif not ips:\n\t bodyContent +='IP address required' \nelse:\n\tif not un or not pwd:\n\t\tun ='cvpuser'\n\t\tpwd='root'\n\t\tbodyContent +='Password not provided. Using default username and password.'\n \ttry:\n\t\tips = parser.parse(ips)\n\texcept Exception as e:\n\t\tbodyContent +='<br><b>Invalid IP address range</b>'\n\t\tips=[]\n\tfor ip in ips: \n\t\tif ip.strip() =='':\n\t\t\tcontinue\n\t\ttotal +=1\n\t\tresp =''\n\t\ttry:\n\t\t\tvalidateAuthentication(ip,un,pwd)\n\t\t\tsuccessIPS.append(ip)\n\t\texcept Exception as e:\n\t\t\tfailedIPSWithCause.append('%s Cause : %s' %(ip,str(e))) \n\t\t\tfailedIPS.append(ip) \n \t#style\n\tbodyContent +='''<link rel=\"stylesheet\" href=\"/pat/v2/style/style.css\">\n\t\t<link rel=\"stylesheet\" href=\"/pat/v2/style/bootstrap.min.css\"> '''\n\tbodyContent +='<h3>Summary</h3>'\n\tbodyContent +='Total :%d</br>' %total \n\tbodyContent +='Success :%d</br>' %len( successIPS)\n\tbodyContent +='Failed :%d</br>' %len( failedIPS) \n\tbodyContent +='</br>'\n\n\tbodyContent +='<h3>Success IPs </h3>'\n\tbodyContent +='<br>Username : %s' %un\n\tbodyContent +='<br>Password : %s </br>' %pwd\n\tbodyContent +='<br>'.join(successIPS)\n\tbodyContent +='</br>'\n\tbodyContent +='</br>'\n\n\tif len(failedIPS)>0:\n\t\tbodyContent +='<h3>Failed IPS with Cause</h3>'\n\t\tbodyContent +='<br>'.join(failedIPSWithCause) \n\t\tbodyContent +='</br>'\n\n\t\tbodyContent +='<h4>Failed IPs</h4>'\n\t\tbodyContent +='<form action=\"/pat/cgi-bin/v2/deviceaction/authenticationValidator.py\" method=\"post\" target=\"_blank\">'\n\t\tbodyContent +='<label >%s</label>' %', '.join(failedIPS) \n\t\tbodyContent += '<input type=\"hidden\" name=\"ips\" value=\"%s\"/>' %','.join(failedIPS) \n\t\tbodyContent +='''\n\t\t\t<h4>Try with different credential</h4>\n\t\t\t<table>\t\t\t \n\t\t\t\t<tr>\n\t\t\t\t\t<td>Username :</td>\n\t\t\t\t\t<td><input type=\"text\" name=\"username\" /> </td>\n\t\t\t\t\t<td>Required</td>\n\t\t\t\t</tr>\n\t\t\t\t<tr>\n\t\t\t\t\t<td>Password :</td>\n\t\t\t\t\t<td><input 
type=\"password\" name=\"pwd\" /> </td>\n\t\t\t\t\t<td>Required</td>\n\t\t\t\t</tr>\n\t\t\t</table>\n\t\t\t</br> \n\t\t\t<input type=\"submit\" value=\"Authenticate\" />\n \t\t<input type=\"reset\" />\n\t\t\t'''\n\nhtml.printHeader('Authetication Validation')\n\nhtml.printBodyContent(bodyContent)\n\n \n" }, { "alpha_fraction": 0.5932504534721375, "alphanum_fraction": 0.614564836025238, "avg_line_length": 20.576923370361328, "blob_id": "ffddcdf94d403f00c77f276f03bcde072a0c33d9", "content_id": "9613e737467f6c81f8587dc89f0bc2fa4a528467", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 563, "license_type": "no_license", "max_line_length": 54, "num_lines": 26, "path": "/cgi/cgi/v2/Lib/vmactions/vmmanager.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n#################################\n\n# Author : Pradeep CH\n# Version : 1.0.0\n# Since : PAT 2.0.0\n# Date : Apr-06-2017\n\n#################################\n\nimport sys\n\nsys.path.append('../')\n\nfrom ParamikkoUtil import executeCommand\n\nstartCommandFormat = 'vim-cmd vmsvc/power.on %s'\nstopCommandFormat = 'vim-cmd vmsvc/power.off %s' \n\ndef start(ip,un,pwd,moid): \n #executeCommand(ip,username,password,cmd):\n executeCommand(ip,un,pwd,startCommandFormat %moid) \n\ndef stop(ip,un,pwd,moid):\n executeCommand(ip,un,pwd,stopCommandFormat %moid) \n\n" }, { "alpha_fraction": 0.594936728477478, "alphanum_fraction": 0.6308016777038574, "avg_line_length": 20.545454025268555, "blob_id": "77e042d21982b5148ff7307b340ffaea06e43f4c", "content_id": "562a575e47075d2fdd94b4e63235d86c5955f59d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 474, "license_type": "no_license", "max_line_length": 61, "num_lines": 22, "path": "/cgi/cgi/v2/Lib/commonutil.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n\n\n#Author : Pradeep CH\n#Date : 10-Feb-2017\n#Version : 1.0.0\n#Since : 2.0.0\n\n\nimport uuid\nimport hashlib\n\ndef getRandomId():\n return str(uuid.uuid4())\n\ndef getMD5(file_name): \n # Open,close, read file and calculate MD5 on its contents \n with open(file_name,'rb') as file_to_check:\n # read contents of the file\n data = file_to_check.read() \n # pipe contents of the file through\n return hashlib.md5(data).hexdigest()\n" }, { "alpha_fraction": 0.6755006909370422, "alphanum_fraction": 0.6765122413635254, "avg_line_length": 26.131868362426758, "blob_id": "209394171d4cc89d51bbc221c6fd627e7a4b81fb", "content_id": "ec273bcfe7e88f78fc1ebc12781ff5920ad44e0a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4943, "license_type": "no_license", "max_line_length": 87, "num_lines": 182, "path": "/cgi/cgi/v2/DeviceManager/Hypervisor/vmwaremanager.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n \nimport sys\nimport os\nfrom os.path import expanduser \nimport re\nimport json\n\nfrom hypercommons import getServerInfoById\n\nsys.path.append('../../Lib')\nfrom commonutil import getRandomId\n \nfrom vmactions import getAllVms,getVMInfo\nfrom vmactions import triggerRemoteConsole \nimport vmactions\n\nfrom vmdkstorage import getVMBaseInfo\n\nimport cgi, cgitb \n\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n \n\ndef loadDevices(ip,un,pwd):\n\tfullresp = {} \n\ttry:\n\t\tvms = getAllVms(ip,un,pwd)\n\t\tif(not vms or len(vms)==0):\n\t\t\tfullresp 
={'status':'error','data':'No VM(s) found :('}\n\t\telse:\n\t\t\tfullresp ={'status':'success','data':vms}\n\t\t\t\n\texcept Exception as e:\n\t\tfullresp ={'status':'error','data':'Something went wrong. Details :'+str(e)}\n\treturn fullresp\n\ndef start(ip,un,pwd,moid): \n\treturn startAll(ip,un,pwd,moid) \n\ndef stop(ip,un,pwd,moid): \n\treturn stopAll(ip,un,pwd,moid) \n\ndef restart(ip,un,pwd,moid): \n\treturn restartAll(ip,un,pwd,moid) \n\ndef stopAll(ip,un,pwd,moids):\n\tresp = ''\n\tmoids = moids.split(',')\n\tfor moid in moids:\n\t\ttry: \n\t\t\tvmactions.stop(ip,un,pwd,moid)\n\t\t\tresp +='<br>Stop request success.' \n\t\texcept Exception as e:\n\t\t\tresp +='<br>Stop request failed.' \n\treturn {'status':'success','data':resp}\n\ndef restartAll(ip,un,pwd,moids):\n\tresp = ''\n\tmoids = moids.split(',')\n\tfor moid in moids:\n\t\ttry: \n\t\t\tvmactions.stop(ip,un,pwd,moid)\n\t\t\tresp +='<br>Stop request success.' \n\t\t\tvmactions.start(ip,un,pwd,moid)\n\t\t\tresp +='<br>Start request success.' \n\t\texcept Exception as e:\n\t\t\tresp +='<br>Restart request failed.' \n\treturn {'status':'success','data':resp}\n\ndef startAll(ip,un,pwd,moids):\n\tresp = ''\n\tmoids = moids.split(',') \n\tfor moid in moids:\n\t\ttry:\n\t\t\tvmactions.start(ip,un,pwd,moid)\n\t\t\tresp +='<br>Start request success.' \n\t\texcept Exception as e:\n\t\t\tresp +='<br>Start request failed.' \n\treturn {'status':'success','data':resp} \n\n#for internal purpose only\ndef readOrginalServer(uid): \n\treturn getServerInfoById(uid) \n\ndef readServer(uid):\n\tserver = readOrginalServer(uid)\n\tif server:\n\t\tserver['password'] = ''\n\t\treturn {'status':'success','data':server}\n\t\t\n\telse:\n\t\treturn {'status':'error','data':'no data'}\n\ndef getCurrentServer(uid):\n\tserver = readOrginalServer(uid)\n\tassert server, 'Server info not found'\n\treturn server\n\ndef showConsole( clientIp, ip,un,pwd,moid):\n\ttry:\n\t\ttriggerRemoteConsole(clientIp,ip,un,pwd,moid)\n\t\treturn {'status':'success','data':'Triggered successfully.'}\n\texcept Exception as e:\n\t\treturn {'status':'error','data':str(e)}\n\ndef getDeviceInfo(ip,un,pwd,dname):\n\ttry:\n\t\tdata = getVMInfo(ip,un,pwd,dname)\n\t\treturn {'status':'success','data':data}\t\n\texcept Exception as e:\n\t\treturn {'status':'error','data':str(e)}\ndef getDeviceInfoMAC(ip,un,pwd,dname):\n\ttry:\n\t\tdata = getVmInfoWithMac(ip,un,pwd,dname)\n\t\treturn {'status':'success','data':data}\t\n\texcept Exception as e:\n\t\treturn {'status':'error','data':str(e)}\t\t\n\ndef main():\n\tform = cgi.FieldStorage() \n\tresp =''\n\tact = form.getvalue('action') \n\t\n\tuid = form.getvalue('uid')\n\t \n\n\tip = form.getvalue('ip')\n\tun = form.getvalue('uname')\n\tpwd =form.getvalue('pwd')\n\tidentifier = form.getvalue('identifier')\n\n\t#TODO validate\n\tif act=='loadDevices':\n\t\tserver = getCurrentServer(uid)\n\t\tresp =loadDevices(server['ip'],server['username'],server['password']) \n\telif act == 'stop':\n\t\tserver = getCurrentServer(uid)\n\t\tmoid = \tform.getvalue('moid') \n\t\tresp =stop(server['ip'],server['username'],server['password'],moid)\n\telif act == 'start':\n\t\tserver = getCurrentServer(uid)\n\t\tmoid = form.getvalue('moid')\n\t\tresp =start(server['ip'],server['username'],server['password'],moid)\n\telif act=='restart':\n\t\tserver = getCurrentServer(uid)\n\t\tmoid = \tform.getvalue('moid') \n\t\tresp =restart(server['ip'],server['username'],server['password'],moid) \n\telif act =='startall':\n\t\tserver = getCurrentServer(uid)\n\t\tmoids = 
form.getvalue('moids') \n\t\tresp =startAll(server['ip'],server['username'],server['password'],moids)\n\telif act =='stopall':\n\t\tserver = getCurrentServer(uid)\n\t\tmoids = form.getvalue('moids') \n\t\tresp =stopAll(server['ip'],server['username'],server['password'],moids)\n\telif act =='restartall':\n\t\tserver = getCurrentServer(uid)\n\t\tmoids = form.getvalue('moids') \n\t\tresp =restartAll(server['ip'],server['username'],server['password'],moids) \n\telif act=='console':\n\t\tclientIp = cgi.escape(os.environ[\"REMOTE_ADDR\"])\n\t\tserver = getCurrentServer(uid)\n\t\tmoid = \tform.getvalue('moid') \n\t\tresp = showConsole( clientIp,server['ip'],server['username'],server['password'],moid)\n\telif act=='loadDeviceInfo':\n\t\tserver = getCurrentServer(uid)\n\t\tdevicename = form.getvalue('deviceName') \n\t\tresp = getDeviceInfo(server['ip'],server['username'],server['password'],devicename) \n\telse:\n\t\tresp = {'status':'error','data':'Invalid action'} \n\t\n\tresp = json.dumps(resp)\t\t\n\tprint resp\n\nif __name__ == \"__main__\":\n print 'Content-type:text/html\\r\\n'\n try:\n main()\n except Exception as e :\n print json.dumps({'status':'error','data':'%s' %str(e)})\n \n\n\n" }, { "alpha_fraction": 0.5735963582992554, "alphanum_fraction": 0.6115326285362244, "avg_line_length": 27.65217399597168, "blob_id": "1e33cbb891b5e895ae5ec4db0dbf719f84920eb8", "content_id": "c82960ec44bf6c5c770e7576a9c9c229ec2dfa10", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 659, "license_type": "permissive", "max_line_length": 101, "num_lines": 23, "path": "/v2/js/ssh/remotessh.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 10-Feb-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n$(document).on('click', '#doremotessh', function (event) { \n\t$('#remoteSSHconsoleResult').html(\"\").fadeOut(100,function(){$('#remoteSSHloading').fadeIn(1000);});\n\ttarget =cgiPath + 'ssh/remotessh.py'\n\tmethod ='POST'\n\tdata = {'username':$('#username').val(),'ip':$('#ip').val()} \n\t$.triggerCall(target,method,data,loadResp)\t\t\t \n});\n\n$(document).on('click', '#resetremotessh', function (event) { \n\t$('#username').val(\"\")\n\t$('#ip').val(\"\")\n});\n\nfunction loadResp(resp){\n\t$('#remoteSSHloading').fadeOut(800,function(){$('#remoteSSHconsoleResult').html(resp).fadeIn(200)})\n}\n" }, { "alpha_fraction": 0.6472290754318237, "alphanum_fraction": 0.6485313177108765, "avg_line_length": 34.33759689331055, "blob_id": "21fee1ca3c2be50f03c99f9e59ec07f1450e736e", "content_id": "10e9b30f25e52b1809227c422619711b02362a9e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13822, "license_type": "no_license", "max_line_length": 161, "num_lines": 391, "path": "/cgi/cgi/v2/DeviceManager/Device/manager.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n \nimport sys\nimport os\nfrom os.path import expanduser \nimport re\nimport json\nfrom threading import Thread\nimport uuid\nimport cgi, cgitb \nimport datetime\nsys.path.append('../../Lib')\n \nfrom sqldb import db\nfrom beans import EnvInfo\nfrom usermanager import authenticate\n\nfrom dhcpconfig import Config,Pool,Host,Subnet\nfrom ParamikkoUtil import checkAuthentication,readRemoteFile,executeCommand, copyToRemote\nfrom vmactions import getVMInfo,start,stop\n\nsourcedirectory = expanduser(\"~\") + \"/pat/\"\nconfigdirectory = sourcedirectory 
+'dhcpconfig/'\nallocationLogLocation = sourcedirectory +'/pat/logs/allocationlog/'\n\ncopyCommand = 'yes | cp -rf %s %s' \nvmdkFlatFileFormat ='%s-flat.vmdk'\nvmdkFileFormat ='%s.vmdk'\nremoveComand = 'rm -rf %s'\nvmdkConvertCommandFormat = 'vmkfstools -i %s -d eagerzeroedthick %s'\n\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n\ndef log(msg):\n with open('log','a') as f:\n f.write('\\n'+ str(datetime.datetime.now())+':'+ msg)\n\ndef logToFile(fileName,msg):\n #log(msg)\n with open(fileName,'a') as f:\n f.write('\\n'+ str(datetime.datetime.now())+':'+ msg)\n\n\ndef getAllServers(): \n s = EnvInfo()\n #def getData(self,tablename,keyset,key=None,keyidentifier = ''):\n return db.getData('EnvInfo',['id','identifier','phase','team','ip','bootstrap']) \n\ndef getEnv(envs,envId):\n for env in envs:\n if env['id'] == envId:\n return env\n\ndef executeDHCPCommand(ip,username,password,cmd): \n try:\n resp = executeCommand(ip,username,password,cmd); \n return {'status':'success','data': 'Response :' + str(resp)}\n except Exception as e:\n return {'status':'error','data': 'Unexpected error occured. %s' %str(e)}\n\ndef getEnvInfo(envs,envId):\n if envs==None:\n return ['-','-','-','-']\n env =getEnv(envs,envId)\n if env:\n return [env['identifier'],env['phase'],env['team'],env['ip']]\n return ['Not found','-','-','Not found']\n\n \ndef getPools():\n try:\n envs= getAllServers()\n data= db.getNamedTrigger('POOLINFO')\n for pool in data:\n envId = pool['assignedEnv']\n [envIdentifier,phase,team,ip] = getEnvInfo(envs,envId)\n pool['envIdentifier'] = envIdentifier\n pool['envteam'] = team\n pool['envphase'] = phase\n pool['envip'] = ip\n return {'status':'success','data':data}\n except Exception as e:\n return {'status':'error','data':str(e)}\n\ndef getPool(uid):\n data = db.getData('DHCPPools',['id','serverIp','rangeEnd','rangeStart','status','devicecount','hypervisor'],uid,'id') \n assert data and len(data)==1,'invalid data recived from database'\n return data[0]\n\ndef getPoolInfo(uid):\n try:\n data = getPool(uid)\n return {'status':'success','data':data}\n except Exception as e:\n return {'status':'error','data':str(e)}\n\ndef mapHyperisor(uid,hypervisor):\n try:\n mapper = {'hypervisor':hypervisor}\n db.updateEntry(mapper,'DHCPPools',uid,'id');\n return {'status':'success','data':'Changes saved.'}\n except Exception as e:\n return {'status':'error','data':str(e)}\n\ndef assign(envId,uid,santizeReq,numberofdays,dhcpIp,dhcpun,dhcppwd,logFile):\n #Convert to full path\n logFile = allocationLogLocation + logFile\n try: \n logToFile(logFile,'Assign initated..')\n logToFile(logFile,'Reading env info..')\n envs= getAllServers()\n env = getEnv(envs,envId)\n logToFile(logFile,'Reading env info completed')\n logToFile(logFile,'Reading Pool info..')\n pool = getPool(uid) \n assert pool, 'Pool info not found'\n assert env, 'Envirnment details not found'\n\n logToFile(logFile,'Reading Pool Completed')\n logToFile(logFile,'Updating database..')\n\n assignedDate = str(datetime.datetime.now().date())\n assignEndDate = str((datetime.datetime.now()+ datetime.timedelta(days=int(numberofdays))).date())\n mapper ={'bootfileName':env['bootstrap'],'status':'Assigned','assignedDate':assignedDate,'assignEndDate':assignEndDate,'assignedEnv':envId}\n db.updateEntry(mapper,'DHCPPools',uid,'id');\n logToFile(logFile,'Updating database completed')\n\n logToFile(logFile,'Making changes to DHCP server')\n generateAndPushConfig(dhcpIp,dhcpun,dhcppwd)\n logToFile(logFile,'Making changes to DHCP server completed')\n \n \n sanmsg = 
'No sanitization'\n        if santizeReq:\n            logToFile(logFile,'Sanitizing initiated')\n            hostCondition = 'serverIp like \"%s\" and poolId like \"%s\"' %(pool['serverIp'],pool['id'])\n            hosts = db.getDataWithCondition('DHCPHosts',['mac','ip','name'],hostCondition) \n            assert hosts,'There is no device found for host'\n            sanmsg = santize(hosts,pool['hypervisor'],logFile)\n            logToFile(logFile,'Sanitizing completed')\n\n\n        return {'status':'success','data':'Assign completed. Sanitization info :'+sanmsg}\n    except Exception as e:\n        return {'status':'error','data':str(e)}\n\ndef generateAndPushConfig(ip,dhcpun,dhcppwd):\n    condition ='serverIp like \"%s\"' %ip \n    config = Config()\n    #def getDataWithCondition(self,tablename,keyset,condition):\n    #load data\n    subnets = db.getDataWithCondition('DHCPSubnets',['subnet','netmask','id'],condition)\n    cSubnets =[]\n    for subnet in subnets:\n        poolCondition = 'serverIp like \"%s\" and subnetId like \"%s\"' %(ip,subnet['id'])\n        pools = db.getDataWithCondition('DHCPPools',[\"id\",\"assignedDate\",'rangeStart','rangeEnd','bootfileName','subnetMask','routers','status'],poolCondition)\n        cPools = []\n        for pool in pools:\n            hostCondition = 'serverIp like \"%s\" and poolId like \"%s\"' %(ip,pool['id'])\n            hosts = db.getDataWithCondition('DHCPHosts',['mac','ip','name'],hostCondition)\n            cHosts = []\n            for host in hosts:\n                host['bootfileName'] =''\n                cHost = Host()\n                cHost.loadData(host)\n                cHosts.append(cHost)\n            cPool = Pool()\n            pool['hosts'] = cHosts\n            cPool.loadData(pool)\n            cPools.append(cPool)\n        cSubnet = Subnet()\n        subnet['pools'] = cPools\n        subnet['hosts'] = []\n        cSubnet.loadData(subnet)\n        cSubnets.append(cSubnet)\n    config.subnets = cSubnets\n    localPath = configdirectory + ip+'.conf'\n    with open(localPath,'w') as f:\n        f.write(config.getAsConfig())\n    assert os.path.exists(localPath),'Configlet file write failed'\n\n    copyToRemote(ip,dhcpun,dhcppwd,localPath,'/etc/dhcp/dhcpd.conf')\n    executeDHCPCommand(ip,dhcpun,dhcppwd,'service dhcpd restart') \n\n    pass\n\ndef release(uid,santizeReq,dhcpip,dhcpun,dhcppwd,logFile):\n    #Convert to full path\n    logFile = allocationLogLocation + logFile\n    logToFile(logFile,'Release initiated...')\n\n    try: \n        mapper ={'bootfileName':'','status':'Not Assigned','assignedDate':'-','assignEndDate':'-','assignedEnv':'-'}\n        logToFile(logFile,'Updating database..')\n        db.updateEntry(mapper,'DHCPPools',uid,'id');\n        logToFile(logFile,'Updating database completed')\n        logToFile(logFile,'Updating DHCP config...')\n        generateAndPushConfig(dhcpip,dhcpun,dhcppwd)\n        logToFile(logFile,'Updating DHCP completed')\n\n        if santizeReq:\n            logToFile(logFile,'Sanitizing initiated')\n            pool = getPool(uid)\n            hostCondition = 'serverIp like \"%s\" and poolId like \"%s\"' %(pool['serverIp'],pool['id'])\n            hosts = db.getDataWithCondition('DHCPHosts',['mac','ip','name'],hostCondition) \n            assert hosts,'There is no device found for host'\n            sanmsg = santize(hosts,pool['hypervisor'],logFile)\n            logToFile(logFile,'Sanitizing completed')\n        logToFile(logFile,'Release completed')\n        return {'status':'success','data':'Release completed.'}\n    except Exception as e:\n        logToFile(logFile,'Release failed %s' %str(e))\n        return {'status':'error','data':str(e)}\n\ndef getHyperVisiorByIp(ip):\n    condition ='ip like \"%s\"' %ip\n    data = db.getDataWithCondition('Hypervisors',['id','ip','username','password','type'],condition)\n    assert len(data)==1,'No hypervisor info found'\n    return data[0]\n\ndef resetVm(vmname,hyperVisor):\n    resp ='\\nSanitizing ' + str(vmname)\n    ip = hyperVisor['ip']\n    un = hyperVisor['username']\n    pwd = 
hyperVisor['password']\n vm = getVMInfo(ip,un,pwd,vmname)\n resp +='Stoping VM'\n try:\n stop(ip,un,pwd,vm['moid'])\n except Exception as e:\n resp = 'Stop failed' + str(e)\n\n assert vm, 'VM info not found'\n \n vmDir = '/vmfs/volumes/%s/%s/' %(vm['datastore'],vmname)\n\n vmdkFile = vmDir + vmdkFileFormat %vmname\n vmdkFlatFile= vmDir + vmdkFlatFileFormat %vmname\n \n #delete the vmdk file\n try:\n cmd = removeComand%vmdkFile \n log('Executing command '+ cmd)\n #resp +=\n r = executeCommand(ip,un,pwd,cmd) \n log(str(r))\n resp +='\\nRemoving old file ref : Success'\n except Exception as e:\n resp +='\\nRemoving old file ref : Failed, ' + str(e)\n\n #delete the flat file\n executeCommand(ip,un,pwd,cmd)\n try:\n cmd = removeComand%vmdkFlatFile \n log('Executing command '+ cmd)\n #resp +=\n r = executeCommand(ip,un,pwd,cmd) \n log(str(r))\n resp +='\\nRemoving old file : Success'\n except Exception as e:\n resp +='\\nRemoving old file : Failed, ' + str(e)\n \n eggFilePath = '/vmfs/volumes/DATASTORE02/ISO/vEOS-65.1-disk1.vmdk'\n\n #extract the file\n try:\n cmd = vmdkConvertCommandFormat %(eggFilePath,vmdkFile)\n log('Executing command '+ cmd)\n #resp +=\n r = executeCommand(ip,un,pwd,cmd) \n log(str(r))\n resp +='\\nReplacing with new file : Success'\n except Exception as e:\n resp +='\\nReplacing with new file, ' + str(e) \n\n resp +='\\nstarting VM'\n try:\n start(ip,un,pwd,vm['moid'])\n except Exception as e:\n resp = '\\nStart failed' + str(e) \n return resp\n\n\ndef santize(devices,hyperVisorIp,logFile):\n msg = ''\n try: \n logToFile(logFile,'Reading hypervisior info...')\n hyperVisor = getHyperVisiorByIp(hyperVisorIp)\n logToFile(logFile,'Reading hypervisior info completed')\n #TODO add condition to check the gypervisor\n for device in devices:\n try:\n logToFile(logFile,'Sanitizing device :%s initiated..' %device['name'])\n msg += resetVm(device['name'],hyperVisor)\n logToFile(logFile,msg)\n logToFile(logFile,'Sanitizing device :%s completed' %device['name'])\n except Exception as e: \n msg +='\\nDevice %s failed to sanitize. 
Cause %s' %(device['name'],str(e))\n                logToFile(logFile,msg)\n                logToFile(logFile,'Sanitizing device :%s failed' %device['name'])\n    except Exception as e:\n        msg = \"Error \" + str(e)\n        logToFile(logFile,msg)\n    return msg\n\ndef getDHCPLoginInfo(ip):\n    condition = 'ip like \"%s\"' %ip\n    data = db.getDataWithCondition('DHCPServers',['username','password','id'],condition)\n    assert len(data)==1,'No DHCP info found'\n    data = data[0]\n    return [data['username'],data['password']]\n\ndef saveLog(msg,user,logfile):\n    try:\n        date = str(datetime.datetime.now())\n        db.add('AllocationHistory',{'logdate':date,'user':user,'log':msg,'detailLogFile':logfile})\n    except Exception as e:\n        log(str(e))\n        pass\n\ndef readLog(logFile):\n    localPath = allocationLogLocation+ logFile\n    assert os.path.exists(localPath),'There is no log asssociated for the request ' + str(logFile)\n    with open(localPath,'r') as f:\n        return {'status':'success','data':f.read()} \n\ndef main():\n\tform = cgi.FieldStorage() \n\tact = form.getvalue('action') \n \n\tresp =''\n\t\n\t#TODO validate\n\tif act == 'getPools': \n\t\tresp = getPools()\n\telif act == 'getPool': \n\t\tuid = form.getvalue('uid') \n\t\tassert uid,'Invalid Pool reference'\n\t\tresp = getPoolInfo(uid)\n\telif act == 'savePoolHypervisor': \n\t\tuid = form.getvalue('uid') \n\t\thypervisor = form.getvalue('hypervisor') \n\t\tassert uid and hypervisor , 'All the fields are required'\n\t\tresp = mapHyperisor(uid,hypervisor)\n\telif act == 'savePoolEnv': \n\t\tuid = form.getvalue('uid') \n\t\tassignaction = form.getvalue('assignaction')\n\t\tusername = form.getvalue('username')\n\t\tpassword = form.getvalue('password')\n\t\tassert uid and assignaction,'Bad request'\n\t\tassert username and password,'Username and password cannot be null' \n\t\tassert authenticate(username,password), \"invalid credentials\"\n\t\tsanitize = form.getvalue('sanitize')\n\t\tserverip = form.getvalue('serverip')\n\t\t[dhcpUn,dhcpPwd] = getDHCPLoginInfo(serverip); \n\t\tif assignaction=='assign':\n\t\t\tenvironment = form.getvalue('environment')\n\t\t\tnumdays = str(form.getvalue('numdays'))\n\t\t\tassert numdays.isdigit(), 'Invalid number of days'\n\t\t\t#log \t\t\t\t\t\n\t\t\tlogFile = str(uuid.uuid4())\n\t\t\tmsg = 'Assign initated. Pool id: %s, Server : %s,Envirnment : %s, Sanitize :%s, numdays:%s' %(str(uid),str(serverip),str(environment), str(sanitize),numdays)\n\t\t\tsaveLog(msg,username,logFile)\n\t\t\tthread = Thread(target = assign, args = (environment,uid,sanitize,numdays,serverip,dhcpUn,dhcpPwd,logFile, ))\n\t\t\tthread.start() \n\t\t\tresp = {'status':'success','data':'Assign initiated','log':logFile}\n\t\telif assignaction=='release':\n\t\t\tlogFile = str(uuid.uuid4())\n\t\t\tmsg = 'Release initated. 
Pool Id : %s, Server :%s, Sanitize :%s' %(str(uid),str(serverip), str(sanitize))\n\t\t\tsaveLog(msg,username,logFile)\t\n\t\t\tthread = Thread(target = release, args = (uid,sanitize,serverip,dhcpUn,dhcpPwd,logFile, )) \n\t\t\tthread.start() \n\t\t\tresp = {'status':'success','data':'Release initiated','log':logFile}\n\t\telse:\n\t\t\tassert False,'Invalid request'\n\telif act == 'showLog':\n\t\tlogfile = form.getvalue('logfile')\n\t\tassert logfile,'No records'\n\t\tresp = readLog(logfile)\n\n\telse:\n\t\tresp = {'status':'error','data':'Invalid action'} \n\t\n\tresp = json.dumps(resp)\t\t\n\tprint resp\n\nif __name__ == \"__main__\":\n    print 'Content-type:text/html\\r\\n'\n    try:\n        main()\n    except Exception as e :\n        print json.dumps({'status':'error','data':'%s' %str(e)})\n    \n\n\n" }, { "alpha_fraction": 0.6965277791023254, "alphanum_fraction": 0.6972222328186035, "avg_line_length": 30.28985595703125, "blob_id": "3ae4b85d23f6b2018275636e5650156b79f281f0", "content_id": "62a3a86ee87e514113207c38a9054474100255af", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4320, "license_type": "no_license", "max_line_length": 180, "num_lines": 138, "path": "/cgi/cgi/v2/DeviceManager/DHCP/dhcpConfigUpdater.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\n__author__ ='Pradeep CH'\n\nimport sys\nimport os\nfrom os.path import expanduser \nimport uuid\nimport json\n\nsys.path.append('../../Lib')\nfrom sshclient import SSHClient\nfrom sqldb import db\n\nfrom ParamikkoUtil import checkAuthentication,executeCommand, copyToRemote\n\nimport cgi\n\n\n\nsourcedirectory = expanduser(\"~\") + \"/pat/\"\nsourceFolder = sourcedirectory+'dhcpconfig/' \n\ndef getServer(uid): \n    data = db.getData('DHCPServers',['ip','username','password'],uid,'id')\n    assert len(data)==1,'No server info'\n    return data[0] \n\n#Functions\ndef validateAuthentication(ip,username,password):\n\tprint \"Content-type:text/html\\r\\n\\r\\n\"\n\ttry: \n\t\tif checkAuthentication(ip,username,password):\n\t\t\treturn {'status':'success','data': 'Authentication Success'}\n\t\telse:\n\t\t\treturn {'status':'error','data': 'Authentication Failed'}\n\texcept Exception as e:\n\t\treturn {'status':'error','data': 'Unexpected error occured. %s' %str(e) }\n\ndef readConfigFile(ip):\n    cfileName = sourceFolder+ip+'.conf'\n    assert os.path.exists(cfileName),'Configuration file not found. Synch config for dhcp '+ str(ip) \n    data = '' \n    with open(cfileName,'r') as f:\n        return f.read() \n    return \n\ndef view(ip,username,password): \t\n    return {'status':'success','data': readConfigFile(ip)} \n\ndef download(ip,username,password):\t\n\ttry:\n\t\tdownloadFileName = '%s_dhcpd.conf' %ip\n\t\tdata = readConfigFile(ip)\n\t\tprint 'Content-Disposition: attachment; filename=\"%s\"' % downloadFileName\n\t\t#print \"Content-Length: \" + str(os.stat(fullPath).st_size)\n\t\tprint # empty line between headers and body\n\t\tprint data\n\t\texit()\n\texcept Exception as e:\t\t\n\t\treturn {'status':'error','data': 'Unexpected error occured. 
%s' %str(e)}\n\ndef showStatus(ip,username,password):\n\treturn executeDHCPCommand(ip,username,password,'service dhcpd status')\n\ndef stop(ip,username,password):\n\treturn executeDHCPCommand(ip,username,password,'service dhcpd stop')\n\ndef start(ip,username,password):\n\treturn executeDHCPCommand(ip,username,password,'service dhcpd start')\n\ndef restart(ip,username,password):\n\treturn executeDHCPCommand(ip,username,password,'service dhcpd restart')\n\ndef executeDHCPCommand(ip,username,password,cmd): \n\ttry:\n\t\tresp = executeCommand(ip,username,password,cmd); \n\t\treturn {'status':'success','data': 'Response :' + str(resp)}\n\texcept Exception as e:\n\t\treturn {'status':'error','data': 'Unexpected error occured. %s' %str(e)}\n\ndef openConsole(ip,username,password):\n\tclientIp = cgi.escape(os.environ[\"REMOTE_ADDR\"])\n\tbodyContent =\"\"\n\tbodyContent += 'Your machine IP is %s <br>' %(str(clientIp)) \n\ttry:\n\t\tcl = SSHClient(clientIp)\n\t\tcl.enableSSH(ip,username)\n\t\treturn {'status':'success','data': 'SSH Successfull'}\n\texcept Exception as e:\n\t\treturn {'status':'error','data':'Could not connect to remote machine. Make sure the plugin is running in your machine, Cause : %s' %str(e)} \n\ndef saveRemoteFile(ip,un,pwd,data): \n\tsourceFile= str(uuid.uuid4())+'dhcpd.conf' \n\tresp = {}\n\ttry: \n\t\twith open(sourceFile,'w') as f:\n\t\t\tdata = f.write(data)\n\t\tcopyToRemote(ip,un,pwd,sourceFile,'/etc/dhcp/dhcpd.conf')\n\t\tresp = {'status':'success','data': 'Saved'}\n\texcept Exception as e:\n\t\tresp = {'status':'error','data': 'Could not commit the changes to remote machine. Cause : %s' %str(e)}\n\ttry:\n\t\tos.remove(sourceFile)\n\texcept:\n\t\tpass\n\treturn resp \n\ndef main():\n\tform = cgi.FieldStorage() \n \n\taction = form.getvalue('action')\n\tuid= form.getvalue('uid')\n\tassert uid,'Invalid request'\n\ttargetFunction = {'autheticate':validateAuthentication,\t'view':view,'download' : download,'stop':stop, \t\t'start':start,'status':showStatus,'restart':restart,'console':openConsole}\n \tresp ={}\n\tserver = getServer(uid)\n\n\tif 'remotesave' == action:\n\t\tdata = form.getvalue('config')\n\t\tassert data, 'No data to save'\n\t\tresp = saveRemoteFile(server['ip'],server['username'],server['password'],data)\n\telif action in targetFunction:\n\t\tfun = targetFunction[action]\n\t\tassert server,'Invalid identifier'\n\t\tresp = fun(server['ip'],server['username'],server['password']) \t\t\n\telse: \n\t\tresp = {'status':'error','data': 'Invalid Action'} \n \tprint 'Content-type:text/html\\r\\n'\n\tprint json.dumps(resp) \n\nif __name__ == \"__main__\":\t\n try:\n main()\n except Exception as e :\n print 'Content-type:text/html\\r\\n'\n print json.dumps({'status':'error','data':'Exception %s' %str(e)})\n\n\n" }, { "alpha_fraction": 0.6424216032028198, "alphanum_fraction": 0.657665491104126, "avg_line_length": 24.230770111083984, "blob_id": "fedf89e00c1e3b44c99e461808e5039cbddc6e59", "content_id": "cef94d0bbff5bbcb9e1d66edcfbc11b6afb8cd0a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2296, "license_type": "permissive", "max_line_length": 126, "num_lines": 91, "path": "/v2/js/DeviceAllocator/DHCP/manager.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 23-May-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n \ncheckParam();\n\nfunction checkParam(){ \n\tuid = getParamValue(document.location.href,'uid') \n\tip = 
getParamValue(document.location.href,'ip') \n\tif(ip===undefined ||uid===undefined ){\n\t\t$('#manageconsole').html('Invalid request!!').fadeIn(800)\n\t\treturn\n\t} \n\t$('#uid').val(uid)\n\t$('#actualIp').html(ip)\n $('#actualIp').attr('href','subnetView.htm?uid='+uid+'&ip='+ip)\n\t$('#manageconsole').html('Loading configuration. Please wait...').fadeIn(800)\n\tgetConfig(uid)\n}\n \nfunction getConfig(uid){ \n\ttarget =cgiPath + 'DeviceManager/DHCP/dhcpConfigUpdater.py'\n\tdata = {'action':'view','uid':uid}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadConfig)\n}\n\nfunction loadConfig(resp){ \n\tvar obj = jQuery.parseJSON( resp ); \n\tif(obj['status']=='error'){\n\t\tloadResp(obj['data']) \t\n\t}else{\n\t\tloadConfigForEdit(obj['data']) \t\n\t\tloadResp('Config loaded successfully');\n\t}\n\t\n}\n\nfunction loadConfigForEdit(c){\n\t$('#dconfig').val(c)\n}\n\nfunction loadResp(r){\n\t$('#manageconsole').html(r).fadeIn(800,function(){$('#manageconsole').fadeOut(5000)})\n}\n\n\n//Action\n\n$(document).on('click', '.action', function (event) { \n\t$('#manageconsole').html('Processing your request. Please wait...').fadeIn(500)\n\tevent.preventDefault(); \n\taction = $(this).attr('action')\n\tuid = $('#uid').val()\n\ttarget =cgiPath + 'DeviceManager/DHCP/dhcpConfigUpdater.py'\n\tif(action=='remotesave'){\n\t\tdata = {'action':action,'uid':uid,'config':$('#dconfig').val()}\n\t}else{\n\t\tdata = {'action':action,'uid':uid}\n\t}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadActionResp)\n});\n\n$(document).on('click', '.actionDownload', function (event) { \n\t$('#manageconsole').html('Processing your request. Please wait...').fadeIn(500,function(){$('#manageconsole').fadeOut(5000)})\n\tevent.preventDefault(); \n\taction = $(this).attr('action')\n\ttarget =cgiPath + 'DeviceManager/DHCP/dhcpConfigUpdater.py'\n\tdata = {'action':action,'uid':uid}\n\t$.redirectPost(target,data)\n});\n\nfunction loadActionResp(resp){\n\tif(resp==undefined){\n\t\tloadResp('Empty response.') \t\n\t\treturn;\n\t}\n\ttry{\n\t\tvar obj = jQuery.parseJSON( resp ); \n\t\tloadResp(obj['data']) \t\n\t}catch(ex){\n\t\tloadResp('Something went wrong. 
Invalid resp')\n\t\tconsole.log('Response')\n\t\tconsole.log(resp)\n\t}\n}\n" }, { "alpha_fraction": 0.5941605567932129, "alphanum_fraction": 0.6291970610618591, "avg_line_length": 23.464284896850586, "blob_id": "01e6515de561356f4bc2fafdf3fc532d2f4e34d1", "content_id": "f41ea8cad0853c9f56f7bd5db74b7c7c31c7491c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 685, "license_type": "no_license", "max_line_length": 47, "num_lines": 28, "path": "/cgi/cgi/v2/Lib/sshclient.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python \nimport socket \n__author__ ='Pradeep'\n\nclass SSHClient(object):\n\tdef __init__(self,host,port=1243):\n\t\tself.host= host\n\t\tself.port = port\n\t\n\tdef enableSSH(self,targetIP,targetUsername): \n\t\tcmd ='ssh %s@%s' %(targetUsername,targetIP)\n\t\tself.executeCommand(cmd) \n\n\tdef executeCommand(self,cmd):\n\t\ttry: \n\t\t\ts = socket.socket()\n\t\t\ts.connect((self.host, self.port))\n\t\t\ts.send(cmd)\n\t\t\tresp = s.recv(1024)\n\t\t\tif resp != 'Received':\n\t\t\t\traise Exception('Remote SSH Unsuccessfull')\n\t\t\ts.close() \n\t\texcept Exception as e:\n\t\t\traise Exception(str(e))\n\nif __name__ == \"__main__\":\n\tcl = SSHClient('192.168.5.9')\n\tprint cl.enableSSH('192.168.5.9','pradeep.k')\n" }, { "alpha_fraction": 0.6280992031097412, "alphanum_fraction": 0.6280992031097412, "avg_line_length": 22, "blob_id": "56bcb213b63af78681e0b728434067440614edc4", "content_id": "45e477eb596d722f8814706d2e0a4a7ea500df35", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 484, "license_type": "no_license", "max_line_length": 70, "num_lines": 21, "path": "/cgi/cgi/v2/Lib/htmlutil/HTML.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n__version__ = 'develop'\n__author__ = 'Pradeep CH'\n\n\nclass HTML(object):\n\tdef printHeader(self,title,contenttype='Content-type:text/html'):\n\t\tprint \"%s\\r\\n\\r\\n\" %contenttype\n\t\tprint \"<html>\"\n\t\tprint \"<head>\"\n\t\tprint \"<title>%s</title>\" %title\n\t\tprint \"</head>\"\n\n\tdef printBodyContent(self,content):\n\t\tprint \"<body>\"\n\t\tprint \"<body>\"\n\t\tprint content\n\t\tprint \"</body>\"\n\t\tprint \"</html>\"\n\n\tdef getBackButton(self,href):\n\t\treturn \"<a href='%s' style='color:blue'>Go back</a></br></br>\" %href\n" }, { "alpha_fraction": 0.6264404654502869, "alphanum_fraction": 0.629001259803772, "avg_line_length": 28.75238037109375, "blob_id": "484d5ee6480afe22eaa29da9ccb751342d32515e", "content_id": "1916a3457b3cd8d7a72f2ce1ac046d50cdc6efe0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3124, "license_type": "no_license", "max_line_length": 99, "num_lines": 105, "path": "/cgi/cgi/v2/command/imagepush.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n \nimport sys\n# Import modules for CGI handling \nimport cgi, cgitb \nimport re\nimport json\nimport os\n\nsys.path.append('../Lib')\nfrom commandexecutor import executeCommand\nfrom commandexecutor import validateAuthentication\nfrom IPAddressManager import parser\n\n\ntempLoc = '../../../../v2/eosimages'\ncmds =['enable','configure','install source %s now','reload now']\nwriteCmd =['enable','write']\n\nHTTP_HOST = os.environ[\"HTTP_HOST\"] \n\n\ndef executeImagePush(ips,un,pwd,cmds): \n if not un or not pwd:\n un ='cvpuser'\n pwd= 'root'\n deviceResp = {}\n for ip in ips:\n 
deviceResp[ip]= ''\n #step 1\n try: \n validateAuthentication(ip,un,pwd)\n deviceResp[ip] +='Authetication Status:Success'\n except Exception as e: \n deviceResp[ip] +='Authetication Status:Failed'\n continue\n #step 2\n try: \n executeCommand(ip,writeCmd,un,pwd) \n deviceResp[ip] +='\\nWrite Status:Success'\n except Exception as e: \n deviceResp[ip] +='\\nWrite Status:Failed' \n #step 3 \n try:\n deviceResp[ip] = '\\nRestart Response ' + str(executeCommand(ip,cmds,un,pwd) ) \n except Exception as e:\t\t\t\t\n if str(e) == 'unable to connect to eAPI':\n deviceResp[ip] += '\\nImage push to device %s is completed. Restart in progress...' %ip\n else:\n deviceResp[ip] += '\\nImage push to device %s is failed. Reason :%s' %(ip,str(e))\n\n return {'status':'success','data':deviceResp}\n\ndef storeFile(fileName,data):\n with open(fileName,'wb') as f:\n f.write(data)\n\ndef formatCmd(cmds,fileName,cgipath):\n #downloadUrl = 'http://'+HTTP_HOST+cgipath +'command/downloadimage.py?fname='+fileName\n downloadUrl = 'http://'+HTTP_HOST+'/pat/v2/eosimages/'+fileName\n cmds[2] = cmds[2] %downloadUrl\n return cmds\n\ndef main():\n global cmds\n # Create instance of FieldStorage \n form = cgi.FieldStorage()\n uploadedFile = form['imgFile'] \n #assert uploadedFile,'No image file uploaded'\n fileName = uploadedFile.filename \n assert fileName, 'Invalid file name'\n ips = form.getvalue('ips') \n un = form.getvalue('uname') \n pwd = form.getvalue('password') \n save = form.getvalue('save') \n cgipath = form.getvalue('cgipath') \n destPath = tempLoc+'/'+fileName\n assert ips and cgipath, 'Invalid Param'\n ips = [ip for ip in parser.parse(ips) if ip.strip()!='']\n assert len(ips)>0,'No Ip Address'\n\n ucmds = formatCmd(cmds,fileName,cgipath)\n storeFile(destPath,uploadedFile.file.read())\n try:\n resp = executeImagePush(ips,un,pwd,ucmds) \n except Exception as e:\n resp ={'status':'errror','data':str(e)}\n finally:\n try:\n os.remove(destPath) \n except:\n pass\n resp = json.dumps(resp)\n print resp\n \nif __name__ == \"__main__\": \n print \"Content-type:text/html\\r\\n\\r\\n\"\n global tempLoc\n try: \n #create the folder structure\n if not os.path.exists(tempLoc):\n os.makedirs(tempLoc)\n main()\n except Exception as e :\n print {'status':'error','data':'Something went wrong. Cause :' + str(e)}\n" }, { "alpha_fraction": 0.5913129448890686, "alphanum_fraction": 0.6061204075813293, "avg_line_length": 21.511110305786133, "blob_id": "d8776acb5c87f1ef447f8608c3a4a9e96ea31a01", "content_id": "f6e68ad532ed2800b46784a9b0aa4ca67ffc95ed", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1013, "license_type": "permissive", "max_line_length": 70, "num_lines": 45, "path": "/v2/js/ipscanner/ipscan.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 10-Feb-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\ncheckParam();\n\n\n//Actions\n$(document).on('click', '#reset', function (event) { \n\tloadResp('Performing IP scan. 
Please wait...') \n\tevent.preventDefault(); \n\t$('#startip').val('')\n\t$('#endip').val('') \n\t//$('#consoleContent').html('')\n});\n\n$(document).on('click', '#scan', function (event) { \t\t\t\n\tstartip =$('#startip').val()\n\tendip = $('#endip').val()\n\ttarget =cgiPath + 'IPSweep/ipsweep.py'\n\tmethod ='POST'\n\tdata = {'startip':startip,'endip':endip} \n\t$.triggerCall(target,method,data,loadResp)\n\t$('#reset').click()\n});\n\nfunction checkParam(){\n\tfromIP = getParamValue(document.location.href,'fromIP') \n\ttoIP = getParamValue(document.location.href,'toIP') \n\tif(fromIP===undefined || toIP===undefined || toIP==\"\" || fromIP==\"\"){\n\t\treturn\n\t} \n\t//if it's va;lid perform click \n\t$('#startip').val(fromIP)\n\t$('#endip').val(toIP) \n\t$('#scan').click()\n}\n\n\nfunction loadResp(resp){\n\t $('#consoleContent').html(resp).fadeIn(600); \n}\n" }, { "alpha_fraction": 0.6266397833824158, "alphanum_fraction": 0.6352169513702393, "avg_line_length": 26.48611068725586, "blob_id": "93f39cb1f7d8c7ba4356f8efb863954448e1216f", "content_id": "01961e5cc171631a2264557f3a8bdaf146fe6ae0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1982, "license_type": "no_license", "max_line_length": 156, "num_lines": 72, "path": "/cgi/cgi/v2/Lib/vmactions/getallvms.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n\"\"\"\nPython program for listing the vms on an ESX / vCenter host\n\"\"\"\n \nimport atexit \nimport sys\n\nsys.path.append('../Lib')\n\nfrom pyVim.connect import SmartConnect, Disconnect\nfrom pyVmomi import vim\n\ndef getVmInfo(vm, depth=1):\n vms = []\n \"\"\"\n Print information for a particular virtual machine or recurse into a folder\n or vApp with depth protection\n \"\"\"\n maxdepth = 10\n\n # if this is a group it will have children. 
if it does, recurse into them\n # and then return\n if hasattr(vm, 'childEntity'):\n if depth > maxdepth:\n return\n vmList = vm.childEntity\n for c in vmList:\n vms.extend(c, depth+1)\n return vms\n\n # if this is a vApp, it likely contains child VMs\n # (vApps can nest vApps, but it is hardly a common usecase, so ignore that)\n if isinstance(vm, vim.VirtualApp):\n vmList = vm.vm\n for c in vmList:\n vms.extend(c, depth+1)\n return vms\n\n summary = vm.summary\n vm = {'id':vm._moId,'name':summary.config.name,'status':summary.runtime.powerState,'memory':vm.config.hardware.memoryMB,'cpu':vm.config.hardware.numCPU} \n\n if summary.guest != None:\n ip = summary.guest.ipAddress\n if ip != None and ip != \"\":\n vm['ip']=ip\n vms.append(vm) \n return vms\n\ndef getAllVms(serverIp,username,password):\n #connect\n si = SmartConnect(host=serverIp,\n user=username,\n pwd=password) \n\n if not si:\n assert False,\"Could not connect to the specified host using specified username and password\" \n\n atexit.register(Disconnect, si)\n\n content = si.RetrieveContent()\n vms = []\n for child in content.rootFolder.childEntity:\n if hasattr(child, 'vmFolder'):\n datacenter = child\n vmFolder = datacenter.vmFolder\n vmList = vmFolder.childEntity\n for vm in vmList:\n vms.extend(getVmInfo(vm)) \n return vms\n\nif __name__=='__main__':\n print getAllVms('10.10.100.201','root','Payoda#89')\n \n" }, { "alpha_fraction": 0.6953229308128357, "alphanum_fraction": 0.6988863945007324, "avg_line_length": 23.63736343383789, "blob_id": "e3a1b7b06ae76f2404f7867f05dcfd3e8d5ecdfd", "content_id": "2b87e60a992784f126d9f97d4228cc7c5bd48985", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2245, "license_type": "no_license", "max_line_length": 72, "num_lines": 91, "path": "/cgi/cgi/v2/IPSweep/historyManger.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n\nimport sys\n\nimport os.path\nimport os\nimport datetime\nfrom os.path import expanduser \n\nsys.path.append('../Lib')\nfrom commonutil import getRandomId\nfrom htmlutil import html \n\n\nsourcedirectory = expanduser(\"~\") + \"/pat/\"\nsourceFolder = sourcedirectory +'/sweephistory'\n\nfileFormat= '%s/%s'\nsource = fileFormat %(sourceFolder,'files')\nmaxRec =10\n\n#create the folder structure\nif not os.path.exists(sourceFolder):\n\tos.makedirs(sourceFolder)\n\topen(source,'w')\n\ndef getHistory():\n\tif not os.path.exists(source):\n\t\treturn []\n\n\twith open(source,'r') as f:\n\t\tdata = f.read()\n\t\tif not data or data.strip()=='':\n\t\t\treturn []\n\t\treturn data.split('\\n') \n\ndef writeData(filename,data):\n\twith open(filename,'w') as f:\n\t\tf.write(data)\n\ndef updateHistory(fileNames):\n\twith open(source,'w') as f:\n\t\tf.write('\\n'.join(fileNames))\n\n#This changes the data to complete html \ndef formatData(data,startIP,endIP,reg_format_date):\n\tif data:\n\t\ttitle = 'Sweep History:%s-%s' %(startIP,endIP)\t\t\n\t\tprefix =''\n\t\t#prefix ='Content-type:text/html\\r\\n\\r\\n'\n\t\tprefix +='<html><head><title>%s</title></head></body>' %title\n\t\tprefix +='<h3>Sweep History </h3>'\n\t\tprefix +='Date :%s</br>' %(reg_format_date) \n\t\tprefix +='Range :%s&nbsp;-&nbsp;%s</br></br>' %(startIP,endIP)\n\t\t\n\t\tdata =prefix + data\n\t\tdata +='</body></html>'\n\t\treturn data\n\treturn 'No data'\n\ndef readHistory(fname):\n\tfullname = fileFormat %(sourceFolder,fname)\n\tif os.path.isfile(fullname):\n\t\twith open(fullname) as f:\n\t\t\treturn 
f.read()\n\treturn 'No data to be shown'\n\ndef saveHistory(data,startIp,endIP):\n\tglobal maxRec\n\td_date = datetime.datetime.now()\n\n\tfileName = getRandomId() +'.htm'\n\n\treg_format_date = d_date.strftime(\"%Y-%m-%d %H:%M:%S\")\n\n\tfilePath = fileFormat %(sourceFolder,fileName)\n\n\thistoryentry = '%s %s %s %s' %(fileName,startIp,endIP,reg_format_date)\t\n\t\n\texistingFileNames = getHistory()\n\n\tif len(existingFileNames)>=maxRec:\n\t\tlastEntry = existingFileNames[maxRec-1]\n\t\tos.remove(fileFormat %(sourceFolder,lastEntry.split(' ')[0]))\n\t\texistingFileNames.remove(existingFileNames[maxRec-1])\n\t\n\tdata = formatData(data,startIp,endIP,reg_format_date)\n\twriteData(filePath,data)\n\texistingFileNames.reverse()\n\texistingFileNames.append(historyentry)\n\tupdateHistory(existingFileNames)\n\t\t\n" }, { "alpha_fraction": 0.5614035129547119, "alphanum_fraction": 0.5780701637268066, "avg_line_length": 26.14285659790039, "blob_id": "b0511bc3b8598ea6849ab2ab4cc1b3d9c0072eca", "content_id": "40a83e4e652511c0f66bdea715b4e4e18af5a035", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1140, "license_type": "no_license", "max_line_length": 86, "num_lines": 42, "path": "/cgi/cgi/v2/Lib/vmactions/vminfo.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n \n\"\"\" \n\"\"\"\n\nimport atexit \nimport sys\nimport re\n\nsys.path.append('../')\n\nfrom pyVim.connect import SmartConnect, Disconnect\nfrom pyVmomi import vim, vmodl\n\ndef getDataStore(path):\n try:\n return re.match('.*\\[(.*)\\].*',path).group(1)\n except:\n return ''\ndef getVMInfo(ip,un,pwd,dname):\n si = SmartConnect(host=ip,\n user=un,\n pwd=pwd)\n assert si, \"Cannot connect to specified host using specified username and password\"\n \n info = []\n content = si.content\n objView = content.viewManager.CreateContainerView(content.rootFolder,\n [vim.VirtualMachine],\n True)\n vmList = objView.view \n objView.Destroy()\n\n vm = [vm for vm in vmList if vm.name == dname]\n assert len(vm) ==1,\"Device info not found in the server\"\n vm = vm[0] \n moid = vm._moId\n datastore = getDataStore( vm.summary.config.vmPathName)\n return {'moid':moid,'datastore':datastore} \n\nif __name__=='__main__':\n print getVMInfo(\"10.10.100.201\",'root','Payoda#89','vEOS-65.91')\n" }, { "alpha_fraction": 0.6852818131446838, "alphanum_fraction": 0.6868476271629333, "avg_line_length": 34.128440856933594, "blob_id": "4448c46e63475b6ced5060b4d4bf3c15f2d7df84", "content_id": "8efd6ed715e9c1c86ab6c4d40272b887683be746", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3832, "license_type": "no_license", "max_line_length": 138, "num_lines": 109, "path": "/cgi/cgi/v2/scripter/scriptmanager.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n \nimport sys\nimport os\nfrom os.path import expanduser \nimport re\nimport json\nimport datetime\nsys.path.append('../Lib')\n \nfrom sqldb import db\nfrom beans import EnvInfo\nfrom ParamikkoUtil import checkAuthentication,executeCommand,copyToRemote\nfrom usermanager import authenticate\nimport cgi, cgitb \n\n\nsourcedirectory = expanduser(\"~\") + \"/pat/\"\nscriptDir = sourcedirectory +'scripts/'\n\n\ndef getScripts(): \n #def getData(self,tablename,keyset,key=None,keyidentifier = ''):\n try:\n data = db.getData('ScriptMapper',['id','fileName','uploadedBy','uploadedOn','param']) \n return 
{'status':'success','data':data}\n except Exception as e:\n return {'status':'error','data':'Unexpected excpetion'}\n\ndef uploadFile(data,fileName): \n fullPath = scriptDir+ fileName\n assert not os.path.exists(fullPath), 'Duplicate script'\n with open(fullPath,'wb') as f:\n f.write(data)\n\ndef saveScriptInfo(filename,uploadedBy,param,scriptType):\n uploadedOn = str(datetime.datetime.now().date())\n db.addEntry({'fileName':filename,'uploadedBy':uploadedBy,'uploadedOn':uploadedOn,'param':param,'scriptType':scriptType},'ScriptMapper')\n\ndef getScriptInfo(scriptId):\n data = db.getData('ScriptMapper',['id','fileName','uploadedBy','uploadedOn','param'],scriptId,'id')\n assert len(data)==1,'No info'\n return data[0]\n\ndef execute(scriptid,ip,username,password,arg):\n sinfo = getScriptInfo(scriptid) \n #Authenticate\n assert checkAuthentication(ip,username,password),'Authentication failure'\n #copy to remote\n remotePath = '/tmp/' + sinfo['fileName']\n sourceFile = scriptDir + sinfo['fileName']\n try:\n copyToRemote(ip,username,password,sourceFile,remotePath)\n except Exception as e:\n assert False,'Script execution on remote machine failed. Cause :' + str(e)\n permissionSetCommand = 'chmod 777 ' + remotePath\n exeCommand = remotePath+' ' + arg\n removeCommand = 'rm -rf ' + remotePath\n executeCommand(ip,username,password,permissionSetCommand)\n resp = executeCommand(ip,username,password,exeCommand)\n executeCommand(ip,username,password,removeCommand)\n return {'status':'success','data':'%s' %resp}\n\ndef getScriptInfoResponse(sid):\n sinfo = getScriptInfo(sid) \n return {'status':'success','data':sinfo}\n\ndef main():\n form = cgi.FieldStorage() \n act = form.getvalue('action') \n resp =''\n\t \n if act == 'getScripts': \n resp = getScripts() \n elif act=='upload':\n fileName = form.getvalue('scriptname') \n username = form.getvalue('username') \n password = form.getvalue('password') \n scriptType = form.getvalue('scriptType') \n param = form.getvalue('param') \n uploadedFile = form['script']\n assert fileName and username and password and scriptType, 'All the fields are mandotory'\n assert authenticate(username,password),'Invalid authetication info'\n uploadFile(uploadedFile.file.read(),fileName)\n saveScriptInfo(fileName,username,param,scriptType)\n resp = {'status':'success','data':'Script upload successfull'}\n elif act == 'execute':\n ip = form.getvalue('ip') \n username = form.getvalue('username') \n password = form.getvalue('password') \n scriptid = form.getvalue('id') \n arg = form.getvalue('arg') \n assert ip and username and password and scriptid and arg, 'All the fields are mandotory'\n resp = execute(scriptid,ip,username,password,arg)\n elif act == 'getScriptInfo':\n sid = form.getvalue('id') \n assert sid , 'Invalid script identifier'\n resp = getScriptInfoResponse(sid)\n else:\n assert False, 'Invalid action'\n\n print json.dumps(resp)\n\nif __name__ == \"__main__\":\n print 'Content-type:text/html\\r\\n'\n try:\n main()\n except Exception as e :\n print json.dumps({'status':'error','data':'%s' %str(e)})\n \n" }, { "alpha_fraction": 0.5984455943107605, "alphanum_fraction": 0.609455943107605, "avg_line_length": 24.31147575378418, "blob_id": "2a37efb4426ef051903313318c243238814ed3b3", "content_id": "236dab0494fe27953ad379ed9113e45c41b48326", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1544, "license_type": "permissive", "max_line_length": 123, "num_lines": 61, "path": "/v2/js/ipscanner/history.js", 
"repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 10-Feb-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n\ngetHistory();\n\nfunction getHistory(){ \n\tloadResp('Loading history')\n\ttarget= cgiPath + 'IPSweep/historyView.py',\n \tmethod= 'post'\n \tdata = {'action':'history'}\n\t$.triggerCall(target,method,data,loadHistory)\n}\n\nfunction loadHistory(resp){ \t \n\tvar obj = jQuery.parseJSON( resp );\n\tif (obj['status'] =='error'){ \n\t\tloadResp(obj['data'])\n\t}else{\n\t\tcontent = ''\n\t\tdata = obj['data'] \n\t\tif (Object.keys(data).length == 0 ){\n\t\t\t\tloadResp('No data to load')\n\t\t}else{\n\t\t\ti = 1\t\t\t\n\t\t\t$(data).each(function(){\n\n\t\t\t\tlink = '<a href='+this.fileName+' class=\"historyRef\" >'+this.startIp+'-'+this.endIp+'</a>'\n\t\t\t\tscanLink ='<a target=default href=ipscanner.htm?fromIP='+this.startIp+'&toIP='+this.endIp+' \">Scan</a>' \n\t\t\t\t$('#history tr:last').after('<tr><td>'+(i++)+'</td><td>'+link+'</td><td>'+this.date+'</td><td>'+scanLink+'</td></tr>');\n\t\t\t});\n\t\t} \n\t}\n\n\tloadResp('History loaded.')\n}\n//Actions\n\n$(document).on('click', '.historyRef', function (event) { \n\tevent.preventDefault(); \n\tloadResp('Loading history')\n\ttarget= cgiPath + 'IPSweep/historyView.py',\n \tmethod= 'post'\n \tdata = {'action':'loadhistory','fname':$(this).attr('href')}\n\t$.triggerCall(target,method,data,loadHistoryResult)\n});\n\nfunction loadHistoryResult(resp){\n\tvar obj = jQuery.parseJSON( resp ); \n\tcontent ='Status :' + obj['status']\n\tcontent +='</br>'\n\tcontent +=obj['data']\n\tloadResp(content)\n}\nfunction loadResp(resp){\n\t $('#consoleContent').html(resp).fadeIn(600); \n}\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 6.800000190734863, "blob_id": "a28ba4c1fdac5574880053b1f092737c62d95a5b", "content_id": "3448ff77cf25513384217b80abf8c48cf054a4b5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 78, "license_type": "no_license", "max_line_length": 36, "num_lines": 10, "path": "/cgi/cgi/v2/Lib/IPAddressManager/__init__.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#\n#\n#\n#\n#\n#\n#\n\nfrom IPManger import IPAdressParcer\nparser = IPAdressParcer()\n" }, { "alpha_fraction": 0.599126935005188, "alphanum_fraction": 0.610040009021759, "avg_line_length": 26.489999771118164, "blob_id": "e20216704f273a0ea64aec9cca59a38caaa11a9e", "content_id": "35439dcc37015f7c975a1c75f138107fbdb8fd38", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2749, "license_type": "permissive", "max_line_length": 200, "num_lines": 100, "path": "/v2/js/DeviceAllocator/Hypervisor/add.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 20-May-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n\ncheckParam();\n\nfunction checkParam(){ \n\tuid = getParamValue(document.location.href,'uid') \n\tif(uid==undefined){\n\t\treturn\n\t}\n\tloadResp('Loading server info..') \n\t$('#uid').val(uid)\n\tgetServerInfo(uid)\n}\n\nfunction getServerInfo(uid){ \n\t$('#consoleContent').fadeOut(800,function(){$('#consoleContent').html('Loading servers. 
Please wait..').fadeIn(200)}) \n\ttarget =cgiPath + 'DeviceManager/Hypervisor/manager.py'\n\tdata = {'action':'loadServer','uid':uid,'uname':'','pwd':''}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadServer)\n}\n\nfunction loadServer(resp){ \n\tvar obj = jQuery.parseJSON( resp );\n\tif (obj['status'] ==='error'){ \n\t\tloadResp(obj['data'])\n\t}\n\telse{ \n\t\tserver = obj['data']\n\t\t$('#un').val(server.username)\n\t\t$('#pwd').val(server.password)\n\t\t$('#ip').val(server.ip)\n\t\t$('#type').val(server.type)\n\t\t$('#identifier').val(server.identifier)\n\t\t$('#status').val(server.status) \n\t\tloadResp('Server loaded successfully')\n\t}\n}\n\n\n//Actions\n$(document).on('click', '#statusCheck', function (event) { \n\tevent.preventDefault(); \n\t$('#consoleContent').html('Status check is in progress...').fadeIn(600); \n\ttarget =cgiPath + 'DeviceManager/SystemStatus/manager.py'\n\tact = 'statusServer'\n\tdata = {'action':act,'ip':$('#ip').val(),'uname':$('#un').val(),'pwd':$('#pwd').val()}\n\t$.triggerCall(target,'POST',data,loadStatusResp)\n});\n\nfunction loadStatusResp(r){\n\tvar obj = jQuery.parseJSON( r );\n\t \n\tif (obj['status'] ==='success'){ \n\t\t$('#status').val(obj['data'])\n\t\tloadResp('Status check completed')\n\t}else{\n\t\tloadResp(obj['data'])\n\t\t$('#status').val('Failed')\t\n\t}\n}\n$(document).on('click', '#reset', function (event) { \n\tevent.preventDefault(); \n\t$('#un').val('')\n\t$('#pwd').val('')\n\t$('#ip').val('')\n\t$('#type').val('')\n\t$('#identifier').val('') \n\t$('#status').val('Not checked') \n});\n\n$(document).on('click', '#save', function (event) { \n\tevent.preventDefault(); \n\t$('#consoleContent').html('Saving server info. Please wait').fadeIn(600); \n\ttarget =cgiPath + 'DeviceManager/Hypervisor/manager.py'\n act='addserver'\n if($('#uid').val() != undefined && $('#uid').val()!=''){\n\t\tact = 'updateserver'\n\t}\n\tdata = {'action':act,'ip':$('#ip').val(),'uname':$('#un').val(),'pwd':$('#pwd').val(),'uid':$('#uid').val(),'type':$('#type').val(),'identifier':$('#identifier').val(),'status':$('#status').val()}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadActResp)\n});\nfunction loadActResp(resp){\n\tvar obj = jQuery.parseJSON( resp );\n\t \n\tif (obj['status'] ==='success'){ \n\t\t$('#reset').click()\n\t}\n\tloadResp(obj['data'])\n}\nfunction loadResp(resp){\n\t$('#consoleContent').fadeOut(100,function(){$('#consoleContent').html(resp).fadeIn(600);});\n}\n" }, { "alpha_fraction": 0.5839571952819824, "alphanum_fraction": 0.596256673336029, "avg_line_length": 22.375, "blob_id": "3b9ce1c3c07fc082d9679d9d7bf3b391c97fc583", "content_id": "5570d2e4a81d8569311b5d2a541ead643812626e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1870, "license_type": "permissive", "max_line_length": 65, "num_lines": 80, "path": "/v2/js/ssl/manage.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 13-Feb-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n \ncheckParam();\n\nfunction checkParam(){ \n\tcn = getParamValue(document.location.href,'cn') \n\tif(cn=== undefined ){\n\t\t$('#manageconsole').html('Invalid request!!').fadeIn(800)\n\t\treturn\n\t} \n\t$('#orginalCa').html(cn) \n\t$('#csrCA').val(cn) \n} \n\n//Action\n//CA CERTS\n$(document).on('click', '.caAct', function (event) {\n\tca = $('#orginalCa').html()\n\tact = $(this).attr('act')\n\ttarget = cgiPath + 'ssl/manager.py' \n\tdata = 
{'ca':ca,'action':act}\n\t$.redirectPost(target,data);\n});\n\n//USER CERT\n$(document).on('click', '#GenerateUserCert', function (event) {\n\tca = $('#orginalCa').html()\n\tact = $(this).attr('act')\n\tusername = $('#username').val()\n\ttarget = cgiPath + 'ssl/manager.py' \n\tdata = {'ca':ca,'action':act,'username':username}\n\t$.redirectPost(target,data);\n\t$('#username').val('')\n});\n\n//DEVICE CERT \n$(document).on('click', '#GenerateDeviceCert', function (event) {\n\tca = $('#orginalCa').html()\n\tact = $(this).attr('act')\n\tdeviceName = $('#deviceName').val()\n\ttarget = cgiPath + 'ssl/manager.py' \n\tdata = {'ca':ca,'action':act,'deviceName':deviceName}\n\t$.redirectPost(target,data);\n\t$('#deviceName').val('')\n});\n\n$(document).on('change','#csrFile',function(){\n var files= $('#csrFile')[0].files\n if(files.length==0){\n $('#fileLabel').val('Select an CSR file');\n return\n }\n $('#fileLabel').val(files[0].name);\n});\n\n//CSR CERT\n/*\n$(document).on('click', '#signCSR', function (event) {\n\tca = $('#orginalCa').html()\n\tact = $(this).attr('act')\n\tcsrFile = $('#csr')[0].files[0]\n\tif(csrFile === undefined){\n\t\talert('Upload a CSR')\n\t\treturn\n\t}\n\ttarget = cgiPath + 'ssl/manager.py' \n\tdata = {'ca':ca,'action':act,'csrFile':csrFile}\n\t$.redirectPost(target,data);\n\t$('#deviceName').val('')\n});\n*/\nfunction loadResp(resp){\n\t $('#consoleContent').html(resp).fadeIn(600); \n}\n" }, { "alpha_fraction": 0.4828137457370758, "alphanum_fraction": 0.49000799655914307, "avg_line_length": 28.046510696411133, "blob_id": "b8bfb9326940d7525ada9057e6c8cb523ceb30e9", "content_id": "e5aefeb3d1ea2fe39cae5de6022acd3ec5db6107", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1251, "license_type": "no_license", "max_line_length": 49, "num_lines": 43, "path": "/cgi/cgi/v2/Lib/dhcpconfig/JsonParser.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "__author__='Pradeep CH'\n__version__ ='1.0.0'\n__date__= '10- Mar- 2017'\n\nimport json\nfrom beans import Config,Pool,Host,Subnet\n\nclass JsonParser(object): \n @staticmethod\n def parse(jsonData):\n c = Config()\n jsonObj = json.loads(jsonData)\n subnets = jsonObj['subnets']\n for subnet in subnets:\n s= Subnet()\n pools = subnet['pools']\n for pool in pools:\n p = Pool()\n hosts = pool['hosts']\n for host in hosts:\n h = Host()\n h.mac = host['mac']\n h.ip = host['ip']\n p.hosts.append(h)\n p.rangeStart = pool['rangeStart']\n p.rangeEnd = pool['rangeEnd']\n p.bootfileName = pool['bootfileName']\n p.subnetMask = pool['subnetMask']\n p.status = pool['status']\n p.routers = pool['routers'] \n p.assignedDate = pool['assignedDate']\n s.pools.append(p)\n hosts = subnet['hosts']\n for host in hosts:\n h = Host()\n h.mac = host['mac']\n h.ip = host['ip']\n s.hosts.append(h)\n \n s.subnet= subnet['subnet']\n s.netmask= subnet['netmask']\n c.subnets.append(s)\n return c\n\n\n" }, { "alpha_fraction": 0.7285998463630676, "alphanum_fraction": 0.7319177389144897, "avg_line_length": 26.309091567993164, "blob_id": "a0ad08c6e8797e411c5e6c3d0996e4507b051f62", "content_id": "7fe15e0b8398ed7a652ad059bc45c36d66a6eada", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1507, "license_type": "no_license", "max_line_length": 66, "num_lines": 55, "path": "/cgi/cgi/v2/Lib/ParamikkoUtil.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\nimport paramiko\nimport uuid\nimport os\n\ndef 
copyRemoteFile(ip,username,password,remotepath,localfileName):\n\t#print 'Copying file from %s to local' %(ip) \n\tssh = getSSHClient(ip,username,password)\n\tftp = ssh.open_sftp()\n\tftp.get( remotepath,localfileName)\n\tftp.close()\n\ndef getSSHClient(ip,un,pwd):\n\tssh = paramiko.SSHClient()\n\tssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n\tssh.connect(ip, username=un,password=pwd,timeout=10)\n\treturn ssh\n\ndef checkAuthentication(ip,username,password): \n\ttry:\n\t\tssh = getSSHClient(ip,username,password) \n\t\treturn True\n\texcept paramiko.AuthenticationException as e:\n\t\treturn False \n\ndef executeCommand(ip,username,password,cmd):\n\tssh = getSSHClient(ip,username,password) \n\tstdin, stdout, stderr = ssh.exec_command(cmd)\n\tresp ='No response'\t\n\tif stdout:\n\t\tresp = stdout.read()\n\tssh.close()\n\treturn resp\n\ndef copyToRemote(ip,un,pwd,sourceFile,remotePath):\n\tssh = getSSHClient(ip,un,pwd)\n\tftp = ssh.open_sftp()\n\tftp.put( sourceFile,remotePath)\n\tftp.close()\n\t\ndef readRemoteFile(ip,un,pwd,remotefileName,port=22):\n\t'''transport = paramiko.Transport((ip, port))\n \ttransport.connect(username = un, password = pwd) \n\tsftp = paramiko.SFTPClient.from_transport(transport)\n\twith open(fileName, 'r') as f:\n \tdata = f.read()\n\treturn data''' \n\tdata =''\n\tunique_filename = str(uuid.uuid4())+'dhcpd.conf'\n\tcopyRemoteFile(ip,un,pwd,remotefileName,unique_filename)\n\n\twith open(unique_filename ,'r') as f:\n\t\tdata = f.read()\n\n\tos.remove(unique_filename )\n\treturn data\t \n\n\n" }, { "alpha_fraction": 0.6512879729270935, "alphanum_fraction": 0.6528258323669434, "avg_line_length": 25.489795684814453, "blob_id": "0e2b9ea262de867b63ea31de3af2c1d399f5316f", "content_id": "6513c15640e64771839c3220d463fbdb9255e152", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2601, "license_type": "no_license", "max_line_length": 116, "num_lines": 98, "path": "/cgi/cgi/v2/DeviceManager/Hypervisor/manager.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2\n \nimport sys\nimport os\nfrom os.path import expanduser \nimport re\nimport json\n\n\nfrom hypercommons import getServerInfoById\n\nsys.path.append('../../Lib')\n\nfrom commonutil import getRandomId \nfrom beans import Hypervisor\nfrom sqldb import db\nimport cgi, cgitb \n\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n \n\ntargetName = 'Hypervisors'\n\ndef addServer(s):\n try:\n #addEntry(self,mapper,tablename)\n db.addEntry(s.getAttributes(),targetName)\n return {'status':'success','data':'Hypervisor added successfully'}\n\n except Exception as e:\n return {'status':'error','data':str(e)}\n\n\ndef updateServer(s,key,keyidentifier):\n try:\n #updateEntry(self,mapper,tablename,key,keyidentifier):\n db.updateEntry(s.getAttributes(),targetName,key,'id')\n return {'status':'success','data':'Environment updated successfully'}\n except Exception as e:\n return {'status':'error','data':str(e)}\n\ndef getAllServers():\n try: \n s = Hypervisor()\n #def getData(self,tablename,keyset,key=None,keyidentifier = ''):\n data = db.getData(targetName,s.getAttributes().keys())\n return {'status':'success','data':data}\n except Exception as e:\n return {'status':'error','data':str(e)}\n\ndef getServerInfo(uid):\n try: \n data = getServerInfoById(uid)\n return {'status':'success','data':data}\n except Exception as e:\n return {'status':'error','data':str(e)}\n\ndef main():\n\tform = cgi.FieldStorage() \n\tact = 
form.getvalue('action') \n\t\n\ts = Hypervisor()\n\ts.id = form.getvalue('uid')\n\ts.ip = form.getvalue('ip')\n\ts.username = form.getvalue('uname')\n\ts.password =form.getvalue('pwd')\n\ts.type =form.getvalue('type')\n\ts.identifier =form.getvalue('identifier') \n\ts.status =form.getvalue('status') \n\n\tresp =''\n\t\n\t#TODO validate\n\tif act == 'addserver':\n\t\tassert s.ip and s.username and s.password and s.type and s.identifier,'Invalid data. All the fields are required'\t\n\t\ts.id= getRandomId()\n\t\tresp = addServer(s)\n\telif act =='updateserver':\n\t\tassert s.ip and s.username and s.password and s.type and s.identifier,'Invalid data. All the fields are required'\t\n\t\tassert s.id, 'Invalid id'\n\t\tresp = updateServer(s,s.id,'id')\n\telif act == 'loadservers':\n\t\tresp = getAllServers()\n elif act == 'loadServer':\n\t\tresp = getServerInfo(s.id)\n\telse:\n\t\tresp = {'status':'error','data':'Invalid action'} \n\t\n\tresp = json.dumps(resp)\t\t\n\tprint resp\n\nif __name__ == \"__main__\":\n print 'Content-type:text/html\\r\\n'\n try:\n main()\n except Exception as e :\n print json.dumps({'status':'error','data':'%s' %str(e)})\n \n\n\n" }, { "alpha_fraction": 0.6915887594223022, "alphanum_fraction": 0.7336448431015015, "avg_line_length": 22.33333396911621, "blob_id": "ce554789b396b23f4011f00d9ede3a806eda5817", "content_id": "73e44f2b77333fe7b3177ff5a4cb041ee065623a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 214, "license_type": "no_license", "max_line_length": 46, "num_lines": 9, "path": "/cgi/cgi/v2/Lib/vmactions/__init__.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n__author__='Pradeep CH'\n__version__ ='1.0.0'\n__date__= '10- Mar- 2017'\n\n\nfrom getallvms import getAllVms\nfrom vmmanager import start,stop\nfrom remoteconsole import triggerRemoteConsole\nfrom vminfo import getVMInfo \n\n" }, { "alpha_fraction": 0.6014925241470337, "alphanum_fraction": 0.6358209252357483, "avg_line_length": 28.130434036254883, "blob_id": "bd7039224906f0d5498d5ff7c37a7fd8befc1afa", "content_id": "bb419c8bfd37ca116c702d6a3621bec7705988f3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 670, "license_type": "permissive", "max_line_length": 75, "num_lines": 23, "path": "/v2/js/DeviceAllocator/DHCP/search.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 23-May-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n \n$(document).on('click', '#search', function (event) { \n\t$('#searchResult').html('Search in progress please wait').fadeIn(1000)\n\tevent.preventDefault(); \n\tkeyword =$('#keyword').val()\n\tforce = $('#force').prop('checked')\n\t$('#consoleContent').html('Saving server info. 
Please wait').fadeIn(600); \n\ttarget =cgiPath + 'DeviceManager/DHCP/search.py'\n\tdata = {'keyword':keyword,'force':force} \n\tmethod ='POST'\n\t$.triggerCall(target,method,data,showResp) \n});\n\nfunction showResp(resp){ \n\tvar obj = jQuery.parseJSON( resp ); \n\t$('#searchResult').html(obj['data']).fadeIn(1000)\n}\n" }, { "alpha_fraction": 0.6168360114097595, "alphanum_fraction": 0.6168360114097595, "avg_line_length": 27.12244987487793, "blob_id": "dbbebb5d6b697dd9eefa63705453c3db798663d9", "content_id": "2335cfef4e18de6e4abfcf57fb56e838007c3c23", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1378, "license_type": "no_license", "max_line_length": 78, "num_lines": 49, "path": "/cgi/cgi/v2/command/commandExecute.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\nimport sys\nimport re\nimport json\nsys.path.append('../Lib')\nfrom IPAddressManager import parser\nfrom commandexecutor import executeCommand\n\n# Import modules for CGI handling \nimport cgi, cgitb \n\n\ndef executeCommands(ips,cmds,un,pwd): \n deviceLevelResp = []\n for ip in ips: \n try:\n tempResp = executeCommand(ip,cmds,un,pwd) \n deviceLevelResp.append({'status':'success','ip':ip,'data':tempResp})\n except Exception as e:\n deviceLevelResp.append({'status':'error','ip':ip,'data':str(e)}) \n return {'status':'success','data':deviceLevelResp}\n\ndef main():\n # Create instance of FieldStorage \n form = cgi.FieldStorage() \n ips = form.getvalue('ips') \n un = form.getvalue('username') \n pwd = form.getvalue('password') \n cmd = form.getvalue('cmd') \n if not cmd or not ips:\n resp = {'status':'error','data':'Invalid params'}\n else: \n cmds= re.split('\\n+',str(cmd))\n ips = [ip for ip in parser.parse(ips) if ip.strip()!='']\n if not un or not pwd:\n un = 'cvpuser'\n pwd ='root' \n resp = executeCommands(ips,cmds,un,pwd)\n \n resp = json.dumps(resp)\n print resp\n \nif __name__ == \"__main__\":\n print \"Content-type:text/html\\r\\n\\r\\n\"\n try:\n main()\n except Exception as e :\n print {'status':'error','data':'Something went wrong. 
Cause :' + str(e)}\n" }, { "alpha_fraction": 0.52069491147995, "alphanum_fraction": 0.523079514503479, "avg_line_length": 27.5, "blob_id": "6f84d43070fb3bc40197bb732d6a61f680a96350", "content_id": "cc9fc796e96a3dd97982c6f20bd2832ffae2f1f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5871, "license_type": "no_license", "max_line_length": 123, "num_lines": 206, "path": "/cgi/cgi/v2/Lib/sqldb/SQLLite.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python \n__author__ ='Pradeep'\n\n\nfrom os.path import expanduser \nimport sys\nimport sqlite3\nimport os\nimport datetime\n\nsourcedirectory = expanduser(\"~\") + \"/pat/db/\"\ndb_file = sourcedirectory+'source.db'\nlogfile = sourcedirectory+'log'\nlogformmat = '%s %s %s\\n'\nERROR ='ERROR'\n\nclass SQLDB(object):\n def __init__(self):\n self.log('Initilizing sql db')\n self.check()\n self.loadNamedEntries()\n\n def log(self,msg,mode='DEBUG'):\n with open(logfile,'a') as f:\n f.write(logformmat %(str(datetime.datetime.now()),mode,msg))\n\n def getNamedTrigger(self,name):\n entry= self.namedGetCalls[name]\n q =entry[0]\n keyset =entry[1]\n assert q, 'There is no named GET call associated to' + str(name)\n try:\n conn = sqlite3.connect(db_file) \n cur = conn.cursor() \n self.log(q)\n data = cur.execute(q).fetchall() \n data = self.formatResp(data,keyset)\n return data\n except Exception as e :\n self.log(str(e),ERROR)\n if conn != None:\n conn.close()\n assert False, e \n finally:\n conn.close()\n return \n\n def addEntry(self,mapper,tablename):\n try: \n conn = sqlite3.connect(db_file) \n cur = conn.cursor()\n s= self.prepareAddCommand(mapper,tablename)\n self.log(s)\n cur.executescript(s)\n conn.commit() \n except Exception as e :\n self.log(str(e),ERROR) \n assert False, e\n finally:\n conn.close()\n\n def performAction(self,target,action,condition):\n self.log('Perform action called with action :'+ str(action)+', target :'+str(target)+', condition :'+ str(condition))\n q = ''\n assert target and action and condition, \"invalid param\"\n if action == 'delete':\n q = 'delete from '+ target +' where ' + condition\n else:\n assert False, 'Invalid action'\n self.log(q)\n try:\n conn = sqlite3.connect(db_file) \n cur = conn.cursor()\n data = cur.executescript(q)\n conn.commit()\n except Exception as e:\n self.log(str(e),ERROR)\n if conn != None:\n conn.close()\n assert False, e\n finally:\n if conn != None:\n conn.close()\n\n def getData(self,tablename,keyset,key=None,keyidentifier = ''):\n try:\n conn = sqlite3.connect(db_file) \n cur = conn.cursor()\n s= self.prepareGetQuery(tablename,keyset,key,keyidentifier) \n self.log(s)\n data = cur.execute(s).fetchall() \n data = self.formatResp(data,keyset)\n return data\n except Exception as e :\n self.log(str(e),ERROR)\n if conn != None:\n conn.close()\n assert False, e \n finally:\n conn.close()\n\n def getDataWithCondition(self,tablename,keyset,condition):\n try:\n conn = sqlite3.connect(db_file) \n cur = conn.cursor()\n s= self.prepareGetQuery(tablename,keyset,None,'')\n s = s + ' where ' + condition \n self.log(s)\n data = cur.execute(s).fetchall() \n data = self.formatResp(data,keyset)\n return data\n except Exception as e :\n self.log(str(e),ERROR)\n if conn != None:\n conn.close()\n assert False, e\n finally:\n if conn != None:\n conn.close()\n\n def formatResp(self,data,keyset):\n resp =[]\n for row in data: \n r = {}\n for i in range(len(keyset)):\n r[keyset[i]] = row[i]\n resp.append(r)\n return 
resp\n\n def prepareGetQuery(self,t,ks,k,ki):\n query = 'select %s from %s ' \n wq = '' \n if k and type(k)==str:\n wq= ' where %s like \"%s\" ' %(ki,k)\n elif k and type(k)==int:\n wq= ' where %s=%d ' %(ki,k)\n\n query = query %(','.join(ks),t)+wq \n return query\n\n def updateEntry(self,mapper,tablename,key,keyidentifier):\n try:\n conn = sqlite3.connect(db_file) \n cur = conn.cursor()\n s= self.prepareUpdateCommand(mapper,tablename,key,keyidentifier)\n self.log(s)\n cur.executescript(s)\n conn.commit()\n except Exception as e :\n self.log(str(e),ERROR)\n if conn != None:\n conn.close()\n assert False, e\n finally:\n if conn != None:\n conn.close()\n\n def prepareUpdateCommand(self,m,t,kv,ki):\n query = 'update %s set %s where %s like \"%s\";'\n val = [] \n for k in m: \n val.append('%s=\"%s\"' %(k,m[k]))\n q= query %(t,','.join(val),ki,kv) \n return q\n\n def prepareAddCommand(self,m,t):\n query = 'insert into %s(%s) values(%s);'\n col = []\n val = []\n for k in m:\n col.append(k)\n val.append(\"'%s'\" %m[k])\n query = query %(t,','.join(col),','.join(val)) \n return query\n\n def check(self): \n self.log('Checking and creating tables')\n #create db\n try:\n conn = sqlite3.connect(db_file) \n cur = conn.cursor()\n with open(sourcedirectory+'initialscript','r') as f:\n s = f.read() \n cur.executescript(s)\n conn.commit()\n except Exception as e:\n self.log(str(e),ERROR)\n if conn != None:\n conn.close()\n finally:\n try:\n conn.close()\n except: \n pass\n self.log('Checking compeleted')\n\n def loadNamedEntries(self):\n self.namedGetCalls = {}\n with open(sourcedirectory+'namedgetcalls','r') as f:\n s = f.readlines() \n for line in s:\n data = line.split('::')\n key = data[0]\n q = data[1]\n keyset = data[2].split(',')\n self.namedGetCalls[data[0]]=[q,keyset]\n" }, { "alpha_fraction": 0.6927083134651184, "alphanum_fraction": 0.7395833134651184, "avg_line_length": 26.428571701049805, "blob_id": "67eefda811b2191b7f08ff583e48775a9cddb25c", "content_id": "64ef5e3a6c95590d2f0d68c974872c02546dc7f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 192, "license_type": "no_license", "max_line_length": 47, "num_lines": 7, "path": "/cgi/cgi/v2/Lib/dhcpconfig/__init__.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "__author__='Pradeep CH'\n__version__ ='1.0.0'\n__date__= '10- Mar- 2017'\n\nfrom beans import Config,Pool,Host,Subnet\nfrom JsonParser import JsonParser\nfrom DHCPDConfigParser import DHCPDConfigParser\n" }, { "alpha_fraction": 0.5579150319099426, "alphanum_fraction": 0.5830115675926208, "avg_line_length": 22.0222225189209, "blob_id": "5efc1f59f17f9d4e9de5e6fe96e723562a31c5ae", "content_id": "9ceb187be11b0cd34c0b1c29d61e0dc2be50d19d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1036, "license_type": "permissive", "max_line_length": 73, "num_lines": 45, "path": "/v2/js/scripter/view.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 19-Jun-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n\ngetScripts()\n\nfunction getScripts(){ \n \tloadResp('Loading scripts. 
Please wait..')\n\ttarget =cgiPath + 'scripter/scriptmanager.py'\n\tdata = {'action':'getScripts'}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadServers)\n}\n\nfunction loadServers(resp){ \n\tvar obj = jQuery.parseJSON( resp );\n\t \n\tif (obj['status'] =='error'){ \n\t\tloadResp(obj['data'])\n\t}\n\telse{ \n\t\ti = 1 \n\t\t$(obj['data']).each(function() {\n\t\t\tif(this != undefined){ \n\t\t\t\tvar executeLink = '<a href=\"executor.htm?id='+this.id+'\">Execute</a>'\n \t\t\t\tvar col1 = '<td>'+i+'</td>'\n \t\t\t\tvar col2 = '<td>'+this.fileName+'</td>'\n \t\t\t\tvar col3 = '<td>'+this.uploadedBy+'</td>'\n \t\t\t\tvar col4 = '<td>'+this.uploadedOn+'</td>'\n \t\t\t\tvar col5 = '<td>'+executeLink+'</td>'\n\t\t\t\t$('#scripts tr:last').after('<tr>'+col1+col2+col3+col4+col5+'</tr>');\n\t\t\t\ti++; \n\t\t\t}\n\t\t});\n\t\tloadResp('Scripts loaded successfully')\n\t}\n}\n\nfunction loadResp(resp){\t\n\t$('#consoleContent').html(resp).fadeIn(800)\n}\n" }, { "alpha_fraction": 0.8571428656578064, "alphanum_fraction": 0.8571428656578064, "avg_line_length": 27, "blob_id": "ad8afd51027c9d7a8533be2ce74ba0e3caaf4a69", "content_id": "3a0e47894b4e0526ac29f46e9a819ed20490cd03", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 28, "license_type": "no_license", "max_line_length": 27, "num_lines": 1, "path": "/cgi/cgi/v2/Lib/patlogger/__init__.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "from beans import PATLogger\n" }, { "alpha_fraction": 0.7452830076217651, "alphanum_fraction": 0.7452830076217651, "avg_line_length": 20.200000762939453, "blob_id": "2923f99b60b6eaf316a3147c65b18f57bea11514", "content_id": "161e6885a356e3608dec6dafdf15e8518af9c37c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 106, "license_type": "no_license", "max_line_length": 43, "num_lines": 5, "path": "/cgi/cgi/v2/deviceaction/myIp.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "import cgi\nimport os\nprint \"Content-type: text/html\"\nprint \"\"\nprint cgi.escape(os.environ[\"REMOTE_ADDR\"])\n" }, { "alpha_fraction": 0.6605769395828247, "alphanum_fraction": 0.6605769395828247, "avg_line_length": 25, "blob_id": "81d9edbf901ff42c1fde41b061a49e5b8e517471", "content_id": "24d5839ff85b1b114422f2a7613e70f594eb0423", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1040, "license_type": "no_license", "max_line_length": 78, "num_lines": 40, "path": "/cgi/cgi/v2/command/downloadimage.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n \nimport sys\n# Import modules for CGI handling \nimport cgi, cgitb \nimport os\n\ntempLoc = '/tmp/eosimages'\ndef initiateFileDownload(filePath,downloadFileName,remove=False):\n\tif filePath:\n\t\tdata = '' \n\t\twith open(filePath,'r') as f:\n\t\t\tdata = f.read()\n\n\t\tprint 'Content-Disposition: attachment; filename=\"%s\"' % downloadFileName\n\t\t#print \"Content-Length: \" + str(os.stat(fullPath).st_size)\n\t\tprint # empty line between headers and body\n\t\tprint data\n\t\ttry:\n\t\t\tif remove:\n\t\t\t\tos.remove(filePath)\n\t\texcept:\n\t\t\tpass\n\telse:\n\t\tprint \"Content-type:text/html\\r\\n\\r\\n\"\n\t\tprint '{\"status\":\"ERROR\",\"data\":\"No files to download\"}'\n\ndef main():\n form = cgi.FieldStorage()\n fname = form.getvalue('fname') \n filePath = tempLoc+'/' + fname\n assert os.path.exists(filePath),'No image 
file'\n initiateFileDownload(filePath,fname)\n\nif __name__ == \"__main__\":\n try:\n main()\n except Exception as e :\n print \"Content-type:text/html\\r\\n\\r\\n\"\n print {'status':'error','data':'Something went wrong. Cause :' + str(e)}\n" }, { "alpha_fraction": 0.5655951499938965, "alphanum_fraction": 0.5786272883415222, "avg_line_length": 23.489360809326172, "blob_id": "b02693a3873d41d24ee807d95a4c074ba9d201d6", "content_id": "99d89a739f64b0abf3599a756bcf0e582974ef0b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1151, "license_type": "permissive", "max_line_length": 130, "num_lines": 47, "path": "/v2/js/ssl/newca.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 18-Feb-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n//Actions\n$(document).on('click', '#reset', function (event) { \n\tevent.preventDefault(); \n\tcn = $('#cn').val('')\n\temail = $('#email').val('')\n\tcountry = $('#country').val('')\n\torg = $('#org').val('')\n\tstate = $('#state').val('')\n\tlocality = $('#locality').val('')\n\torgUnit = $('#orgUnit').val('')\n});\n\n$(document).on('click', '#save', function (event) { \n\tevent.preventDefault(); \n\tloadResp('Creating CA. Please wait..')\n\t\n\tcn = $('#cn').val()\n\temail = $('#email').val()\n\tcountry = $('#country').val()\n\torg = $('#org').val()\n\tstate = $('#state').val()\n\tlocality = $('#locality').val()\n\torgUnit = $('#orgUnit').val()\n\t\n\ttarget = cgiPath + 'ssl/manager.py' \n\tdata = {'action':'addCA','cn':cn,'email':email, 'country':country,'org':org,'state':state,'locality':locality,'orgUnit':orgUnit};\n\tmethod ='POST'\n\n\t$.triggerCall(target,method,data,showResp)\n\t$('#reset').click()\n});\n\nfunction showResp(resp){\n\tconsole.log('recived')\n\tvar obj = jQuery.parseJSON( resp ); \n\tloadResp(obj['data']) \n}\nfunction loadResp(resp){\n\t $('#consoleContent').html(resp).fadeIn(600); \n}\n" }, { "alpha_fraction": 0.5060975551605225, "alphanum_fraction": 0.5853658318519592, "avg_line_length": 11.461538314819336, "blob_id": "1c00e996487ea328470ef3d72487e98c58b480dc", "content_id": "432ed53418a8d08abd585a028d6821a5d88cda98", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 164, "license_type": "permissive", "max_line_length": 28, "num_lines": 13, "path": "/v2/js/serverConfig.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "//Jquery extention file\n\n/*\n\nAuthor : Pradeep CH\nVersion : 1.0.0\nSince : 1.0.0\nDate : Sat Feb 4 2017\n\n*/\n\ncontextName = '/pat/v2/'\ncgiPath = '/pat/cgi-bin/v2/'\n\n\n" }, { "alpha_fraction": 0.6306461095809937, "alphanum_fraction": 0.6460834741592407, "avg_line_length": 25.104476928710938, "blob_id": "30889d6175699527b202d9f674297f1646f33576", "content_id": "8c8159c16848d652bb0c16979540588484c922a5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1749, "license_type": "permissive", "max_line_length": 119, "num_lines": 67, "path": "/v2/js/kvm/newkvm.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 10-Feb-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n\ncheckParam();\n\nfunction checkParam(){ \n\tuid = getParamValue(document.location.href,'uid') \n\tif(uid==undefined){\n\t\treturn\n\t}\n\tloadResp('Loading server info..') 
\n\t$('#updateKVMServerID').val(uid)\n\tgetServerInfo(uid)\n}\n\nfunction getServerInfo(uid){ \n\t$('#kvmViewConsole').fadeOut(800,function(){$('#kvmViewLoading').html('Loading servers. Please wait..').fadeIn(200)}) \n\ttarget =cgiPath + 'kvm/kvmmanger.py'\n\tdata = {'action':'loadServer','uid':uid,'uname':'','pwd':''}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadServer)\n}\n\nfunction loadServer(resp){ \n\tvar obj = jQuery.parseJSON( resp );\n\tif (obj['status'] =='error'){ \n\t\tloadResp(obj['data'])\n\t}\n\telse{ \n\t\tserver = obj['data']\n\t\t$('#newkvmip').val(server.ip)\n\t\t$('#newkvmusername').val(server.username)\n\t\t$('#newkvmpassword').val(server.password)\n\t\tloadResp('Server loaded successfully')\n\t}\n}\n\n\n//Actions\n$(document).on('click', '#newkvmreset', function (event) { \n\tevent.preventDefault(); \n\t$('#newkvmip').val('')\n\t$('#newkvmusername').val('')\n\t$('#newkvmpassword').val('')\n});\n\n$(document).on('click', '#savekvmserver', function (event) { \n\tevent.preventDefault(); \n\tip = $('#newkvmip').val()\n\tusername = $('#newkvmusername').val()\n\tpassword = $('#newkvmpassword').val()\n\t$('#savekvmconsole').html('Saving server info. Please wait').fadeIn(600); \n\ttarget =cgiPath + 'kvm/kvmmanger.py'\n\tdata = {'action':'add','ip':ip,'uname':username,'pwd':password,'uid':$('#updateKVMServerID').val()}\n\tmethod ='POST'\n\t$.triggerCall(target,method,data,loadResp)\n\t$('#newkvmreset').click()\n});\n\nfunction loadResp(resp){\n\t$('#savekvmconsole').fadeOut(100,function(){$('#savekvmconsole').html(resp).fadeIn(600);});\n}\n" }, { "alpha_fraction": 0.7114788293838501, "alphanum_fraction": 0.7145811915397644, "avg_line_length": 22.585365295410156, "blob_id": "fa346088c791272cb0dbce00b9a1a7b504f3ca13", "content_id": "bd9afd707c9fa91cfbee36ae93efe8a3175f3894", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 967, "license_type": "no_license", "max_line_length": 127, "num_lines": 41, "path": "/cgi/cgi/v2/ssh/remotessh.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n# Import modules for CGI handling \nimport cgi, cgitb \n\nimport os\nimport sys\nsys.path.append('../Lib')\n\nfrom htmlutil import html \nfrom sshclient import SSHClient\n#\n#Author Pradeep CH\n#\n__author__ ='Pradeep CH'\n__version__='1.0.0'\n \n\n# Create instance of FieldStorage \nform = cgi.FieldStorage() \nclientIp = cgi.escape(os.environ[\"REMOTE_ADDR\"])\n\nbodyContent = ''#html.getBackButton('/pat/ssh/remotessh.htm')\n\n# Get mode from fields \nun = form.getvalue('username')\nip = form.getvalue('ip')\n\nif not ip or not un:\n\tbodyContent +='IP and Username required.' \nelse:\n\tbodyContent += 'Your machine IP is %s <br>' %(str(clientIp)) \n\ttry:\n\t\tcl = SSHClient(clientIp)\n\t\tcl.enableSSH(ip,un)\n\t\tbodyContent +='SSH successfull'\n\texcept Exception as e:\n\t\tbodyContent += 'Could not connect to remote machine. 
Make sure the PAT plugin is running in your machine, Cause : %s' %str(e)\n#print html\nhtml.printHeader('Authetication Validation')\nhtml.printBodyContent(bodyContent)\n" }, { "alpha_fraction": 0.5895900726318359, "alphanum_fraction": 0.602487325668335, "avg_line_length": 24.845237731933594, "blob_id": "3fc5fee2e646322c46b35130ad68c4458da05f0b", "content_id": "67ece3ecca5fdfba1f57cc0a748413d910be63c7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2171, "license_type": "permissive", "max_line_length": 73, "num_lines": 84, "path": "/v2/js/library/upload.js", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "/*\nAuthor : Pradeep CH\nDate : 06-May-2017\nVersion : 1.0.0\nSince : 2.0.0\n*/\n\n$(document).on('click', '#submit', function (event) { \n showConsole('Uploading file. Please wait...')\n event.preventDefault();\n var files= $('#sourcefile')[0].files\n if(files.length==0){\n showConsole('No file selected')\n return\n }\n var file = files[0]\n var data = new FormData();\n data.append('sourcefile', file)\n data.append('name', $('#name').val())\n data.append('subject', $('#subject').val())\n data.append('desc', $('#desc').val())\n data.append('action', 'upload') \n data.append('mediatype', $('#mediatype').val()) \n target = cgiPath +'library/manage.py'\n $.triggerPOSTCallWithoutContentType(target,data,loadPushResp);\n});\n$(document).on('change','#sourcefile',function(){\n\n $('#name').val('')\n var files= $('#sourcefile')[0].files\n if(files.length==0){\n $('#fileLabel').val('Select an image file');\n return\n }\n $('#fileLabel').val(files[0].name);\n\tautoSelect(files[0].name);\n});\nfunction autoSelect(fileName){\n\t$('#name').val(fileName)\t\n\tslashIndex = fileName.lastIndexOf('/')\n fname = fileName\n if(slashIndex>-1){\n\t\tfname = fileName.substring(slashIndex)\n\t}\n\tdotindex = fname.lastIndexOf('.')\n\textension = ''\n\tif(dotindex>-1 && dotindex+1<fname.length){\n\t\textention = fname.substring(dotindex+1)\n\t}\n\tif(extention == ''){\n\t\treturn;\t\n\t}\n\tif( extention.toLowerCase() === 'pdf' ){\n\t\t$('#mediatype').val('pdf')\n\t}else if (['mp4','avi','mkv'].includes(extention.toLowerCase())){\n\t\t$('#mediatype').val('video')\t\n\t}else if (['jpg','jpeg','png','gif'].includes(extention.toLowerCase())){\n\t\t$('#mediatype').val('image')\n\t}else if (['mp3','ogg','wav'].includes(extention.toLowerCase())){\n\t\t$('#mediatype').val('audio')\n\t}else{\n\t\t$('#mediatype').val('other')\n\t}\n\t\n\n}\n\nfunction loadPushResp(resp){\n if(resp===undefined){\n loadResp('Invalid resp')\n return\n }\n\n var obj = jQuery.parseJSON( resp );\n if(obj['status']=='error'){\n showConsole(obj['data'])\n }else{ \n $('#reset').click()\n showConsole(obj['data'])\n }\n}\nfunction showConsole(resp){\n\t $('#consoleContent').html(resp).fadeIn(600); \n}\n" }, { "alpha_fraction": 0.5965250730514526, "alphanum_fraction": 0.6048905849456787, "avg_line_length": 26.696428298950195, "blob_id": "941b9c6dde9f07a3cd69adc1e8b06f1950202cd2", "content_id": "14194539e7704bb0b3e30aebce24df8bb0c873db", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1554, "license_type": "no_license", "max_line_length": 100, "num_lines": 56, "path": "/cgi/cgi/v2/Lib/vmactions/getDataStore.py", "repo_name": "pradeeppanayal/pat", "src_encoding": "UTF-8", "text": "\n\"\"\"\nPython program for listing the vms on an ESX / vCenter host with datastore\n\"\"\"\n \nimport atexit \nimport 
sys\nimport re\n\nsys.path.append('../')\n\nfrom pyVim.connect import SmartConnect, Disconnect\nfrom pyVmomi import vim\n\ndef getDataStore(serverIp,username,password):\n #connect\n si = SmartConnect(host=serverIp,\n user=username,\n pwd=password) \n\n if not si:\n assert False,\"Could not connect to the specified host using specified username and password\" \n\n atexit.register(Disconnect, si)\n\n content = si.RetrieveContent()\n vms = {}\n for child in content.rootFolder.childEntity:\n if hasattr(child, 'vmFolder'):\n datacenter = child\n vmFolder = datacenter.vmFolder\n vmList = vmFolder.childEntity\n for vm in vmList: \n m = re.search('\\[(.*)\\]',vm.config.files.vmPathName)\n dataStore = 'No data'\n if m:\n dataStore = m.group(1) \n mac = getmac(vm)\n vms[vm.name] = {'datastore':dataStore,'moid':vm._moId,'mac':mac} \n return vms\n\ndef getmac(vm):\n try:\n devices = vm.config.hardware.device\n for device in devices:\n if('Network adapter' not in device.deviceInfo.label):\n continue\n connectStatus = 'Not connected'\n if device.connectable.connected:\n connectStatus = 'Connected'\n return device.macAddress \n except:\n return 'Error'\n\n\nif __name__=='__main__':\n print getDataStore('10.10.100.201','root','Payoda#89')\n \n" } ]
72
gintautasp12/electra
https://github.com/gintautasp12/electra
1a9c4e2ebd2e225ef0fd2a857da9aec63cfe032f
9b4f9e6efee68baa703cf35fde447fe21fb3acf6
de47410b134bd4ae59e71aad111d8d34934fb9b3
refs/heads/master
2023-05-09T06:25:27.252876
2021-05-28T06:08:37
2021-05-28T06:08:37
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5846372842788696, "alphanum_fraction": 0.5889046788215637, "avg_line_length": 29.565217971801758, "blob_id": "704ef28eced949bc6423fcee8853d0a1f1fec553", "content_id": "57f2413e04c72072ee7dd6e1c0ec07992d674f0b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 703, "license_type": "no_license", "max_line_length": 113, "num_lines": 23, "path": "/test_qa.py", "repo_name": "gintautasp12/electra", "src_encoding": "UTF-8", "text": "from tqdm import tqdm\n\nfrom predict_single import predict\n\n\nif __name__ == '__main__':\n with open('test_qa.txt', 'r') as file:\n content = file.readlines()\n\n context_questions = []\n for line in content:\n qq = {'context': line.split(' ,')[0], 'question': line.split(' ,')[1]}\n context_questions.append(qq)\n\n print(\"[] Starting predictions...\")\n results = []\n for idx, qq in enumerate(tqdm(context_questions)):\n results.append(predict(context=qq['context'], question=qq['question'].rstrip(), id=idx, model='model_8'))\n\n print(\"[] Writing results...\")\n with open('results.txt', 'a') as file:\n for ans in results:\n file.write(f\"{ans}\\n\")\n" }, { "alpha_fraction": 0.8478260636329651, "alphanum_fraction": 0.8478260636329651, "avg_line_length": 45, "blob_id": "255dd10ae6d23c47a4f03ac78f0f107b367340fe", "content_id": "7ee6b2aaf8c04c5c060e368b257b28b45386a772", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 46, "license_type": "no_license", "max_line_length": 45, "num_lines": 1, "path": "/utils/__init__.py", "repo_name": "gintautasp12/electra", "src_encoding": "UTF-8", "text": "from .utils import postprocess_qa_predictions\n" }, { "alpha_fraction": 0.6384251713752747, "alphanum_fraction": 0.6437795162200928, "avg_line_length": 36.94820785522461, "blob_id": "ff36716ea14e4443c908117b2649d9b53d92eb85", "content_id": "abe93ad82653330a043ed89bf914ba69ef4637ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9525, "license_type": "no_license", "max_line_length": 120, "num_lines": 251, "path": "/predict_single.py", "repo_name": "gintautasp12/electra", "src_encoding": "UTF-8", "text": "\"\"\"\nGintautas Plonis 1812957\nElectra | SQuAD 2.0\n(Optional) REST API\n\"\"\"\nimport argparse\n\nimport numpy as np\nimport torch\nfrom accelerate import Accelerator\nfrom datasets import Dataset\nfrom transformers import AutoConfig, EvalPrediction, ElectraForQuestionAnswering, ElectraTokenizerFast\n\nfrom utils import postprocess_qa_predictions\n\n\ndef parse_args():\n parser = argparse.ArgumentParser(description=\"Finetune a transformers model on a Question Answering task\")\n parser.add_argument(\n \"--dataset_name\",\n type=str,\n default=None,\n help=\"The name of the dataset to use (via the datasets library).\",\n )\n parser.add_argument(\n \"--train_file\",\n type=str,\n default=\"dataset/train-v2.json\",\n help=\"A csv or a json file containing the training data.\"\n )\n parser.add_argument(\n \"--model_name_or_path\",\n type=str,\n default=\"google/electra-small-discriminator\",\n help=\"Path to pretrained model or model identifier from huggingface.co/models.\",\n )\n parser.add_argument(\n \"--validation_file\",\n type=str,\n default=None,\n help=\"A csv or a json file containing the validation data.\"\n )\n parser.add_argument(\n \"--test_file\",\n type=str,\n default=\"dataset/test-v2.json\",\n help=\"A csv or a json file containing the Prediction data.\"\n )\n 
parser.add_argument(\n \"--max_predict_samples\",\n type=int,\n default=None,\n help=\"For debugging purposes or quicker training, truncate the number of prediction examples to this\",\n )\n parser.add_argument(\n \"--preprocessing_num_workers\",\n type=int,\n default=4,\n help=\"The number of processes to use for the preprocessing.\",\n )\n parser.add_argument(\n \"--overwrite_cache\", type=bool, default=False, help=\"Overwrite the cached training and evaluation sets\"\n )\n parser.add_argument(\n \"--pad_to_max_length\",\n action=\"store_true\",\n help=\"If passed, pad all samples to `max_seq_length`. Otherwise, dynamic padding is used.\",\n )\n parser.add_argument(\n \"--max_seq_length\",\n type=int,\n default=384,\n help=\"The maximum total input sequence length after tokenization. Sequences longer than this will be truncated,\"\n \" sequences shorter will be padded if `--pad_to_max_length` is passed.\",\n )\n parser.add_argument(\n \"--per_device_train_batch_size\",\n type=int,\n default=8,\n help=\"Batch size (per device) for the training dataloader.\",\n )\n parser.add_argument(\n \"--output_dir\",\n type=str,\n default=\"predictions\",\n help=\"Where to store the final model.\")\n\n return parser.parse_args()\n\n\ndef create_and_fill_np_array(start_or_end_logits, dataset, max_len):\n \"\"\"\n Create and fill numpy array of size len_of_validation_data * max_length_of_output_tensor\n\n Args:\n start_or_end_logits(:obj:`tensor`):\n This is the output predictions of the model. We can only enter either start or end logits.\n eval_dataset: Evaluation dataset\n max_len(:obj:`int`):\n The maximum length of the output tensor. ( See the model.eval() part for more details )\n \"\"\"\n\n step = 0\n # create a numpy array and fill it with -100.\n logits_concat = np.full((len(dataset), max_len), -100, dtype=np.float64)\n # Now since we have create an array now we will populate it with the outputs gathered using accelerator.gather\n for i, output_logit in enumerate(start_or_end_logits): # populate columns\n # We have to fill it such that we have to take the whole tensor and replace it on the newly created array\n # And after every iteration we have to change the step\n\n batch_size = output_logit.shape[0]\n cols = output_logit.shape[1]\n\n if step + batch_size < len(dataset):\n logits_concat[step: step + batch_size, :cols] = output_logit\n else:\n logits_concat[step:, :cols] = output_logit[: len(dataset) - step]\n\n step += batch_size\n\n return logits_concat\n\n\ndef predict(question, context, id, model=None):\n args = parse_args()\n accelerator = Accelerator()\n\n question_column_name = \"question\"\n context_column_name = \"context\"\n\n config = AutoConfig.from_pretrained('google/electra-small-discriminator')\n tokenizer = ElectraTokenizerFast.from_pretrained('google/electra-small-discriminator')\n model = ElectraForQuestionAnswering.from_pretrained(\n args.model_name_or_path if model is None else model,\n from_tf=False,\n config=config,\n )\n\n def prepare_validation_features(examples):\n tokenized_examples = tokenizer(\n examples[question_column_name if pad_on_right else context_column_name],\n examples[context_column_name if pad_on_right else question_column_name],\n truncation=\"only_second\" if pad_on_right else \"only_first\",\n max_length=max_seq_length,\n stride=128,\n return_overflowing_tokens=True,\n return_offsets_mapping=True,\n padding=\"max_length\" if args.pad_to_max_length else False,\n )\n\n # Since one example might give us several features if it has a long context, we need 
a map from a feature to\n # its corresponding example. This key gives us just that.\n sample_mapping = tokenized_examples.pop(\"overflow_to_sample_mapping\")\n\n # For evaluation, we will need to convert our predictions to substrings of the context, so we keep the\n # corresponding example_id and we will store the offset mappings.\n tokenized_examples[\"example_id\"] = []\n\n for i in range(len(tokenized_examples[\"input_ids\"])):\n # Grab the sequence corresponding to that example (to know what is the context and what is the question).\n sequence_ids = tokenized_examples.sequence_ids(i)\n context_index = 1 if pad_on_right else 0\n\n # One example can give several spans, this is the index of the example containing this span of text.\n sample_index = sample_mapping[i]\n tokenized_examples[\"example_id\"].append(examples[\"id\"][sample_index])\n\n # Set to None the offset_mapping tha\n # t are not part of the context so it's easy to determine if a token\n # position is part of the context or not.\n tokenized_examples[\"offset_mapping\"][i] = [\n (o if sequence_ids[k] == context_index else None)\n for k, o in enumerate(tokenized_examples[\"offset_mapping\"][i])\n ]\n\n return tokenized_examples\n\n max_seq_length = min(args.max_seq_length, tokenizer.model_max_length)\n pad_on_right = tokenizer.padding_side == \"right\"\n\n def post_processing_function(examples, features, predictions, stage=\"eval\"):\n # Post-processing: we match the start logits and end logits to answers in the original context.\n predictions = postprocess_qa_predictions(\n examples=examples,\n features=features,\n predictions=predictions,\n version_2_with_negative=True,\n n_best_size=10,\n max_answer_length=30,\n null_score_diff_threshold=0,\n output_dir=args.output_dir,\n prefix=stage,\n )\n\n # Format the result to the format the metric expects.\n formatted_predictions = [\n {\"id\": k, \"prediction_text\": v, \"no_answer_probability\": 0.0} for k, v in predictions.items()\n ]\n\n return EvalPrediction(predictions=formatted_predictions, label_ids=[])\n\n examples = {\n \"question\": [question],\n \"context\": [context],\n \"id\": [id],\n \"answers\": [\"...\"]}\n predict_data = prepare_validation_features(examples)\n\n predict_examples = Dataset.from_dict(examples)\n predict_dataset = Dataset.from_dict(predict_data.data)\n\n predict_data['input_ids'] = torch.LongTensor(np.array(predict_data['input_ids']))\n predict_data['token_type_ids'] = torch.LongTensor(np.array(predict_data['token_type_ids']))\n predict_data['attention_mask'] = torch.LongTensor(np.array(predict_data['attention_mask']))\n\n del predict_data['offset_mapping']\n del predict_data['example_id']\n\n with torch.no_grad():\n outputs = model(**predict_data)\n start_logits = outputs.start_logits\n\n start_logits = accelerator.pad_across_processes(start_logits, dim=1, pad_index=-100)\n end_logits = accelerator.pad_across_processes(start_logits, dim=1, pad_index=-100)\n\n all_start_logits = [accelerator.gather(start_logits).cpu().numpy()]\n all_end_logits = [accelerator.gather(end_logits).cpu().numpy()]\n\n max_len = max([x.shape[1] for x in all_start_logits]) # Get the max_length of the tensor\n\n # concatenate the numpy array\n start_logits_concat = create_and_fill_np_array(all_start_logits, predict_dataset, max_len)\n end_logits_concat = create_and_fill_np_array(all_end_logits, predict_dataset, max_len)\n\n # delete the list of numpy arrays\n del all_start_logits\n del all_end_logits\n\n outputs_numpy = (start_logits_concat, end_logits_concat)\n prediction = 
post_processing_function(predict_examples, predict_dataset, outputs_numpy)\n\n return prediction.predictions[0]['prediction_text']\n\n\nif __name__ == \"__main__\":\n result = predict(\n question=\"What is my name?\",\n context=\"My name is John\",\n id=1\n )\n print(result)\n" }, { "alpha_fraction": 0.6516724228858948, "alphanum_fraction": 0.6735870838165283, "avg_line_length": 25.272727966308594, "blob_id": "08503a0cb46b41ed2ffa8eedd8239adc11c4aad9", "content_id": "ca43aec2ea88038bcbe343d502933bf9611a2ca4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 867, "license_type": "no_license", "max_line_length": 113, "num_lines": 33, "path": "/api.py", "repo_name": "gintautasp12/electra", "src_encoding": "UTF-8", "text": "\"\"\"\nGintautas Plonis 1812957\nElectra | SQuAD 2.0\n(Optional) REST API\n\"\"\"\nimport json\n\nimport flask as flask\nfrom flask import request\n\nfrom predict_single import predict\n\napp = flask.Flask(__name__)\napp.config[\"DEBUG\"] = True\n\n\[email protected]('/', methods=['GET'])\ndef index():\n return json.dumps({'success': True}), 200, {'ContentType': 'application/json'}\n\n\[email protected]('/predict', methods=['POST'])\ndef post():\n content = request.get_json(silent=True)\n if content is None or 'question' not in content or 'context' not in content or 'id' not in content:\n return json.dumps({'message': 'Payload unsupported.'}), 400, {'ContentType': 'application/json'}\n\n answer = predict(question=content['question'], context=content['context'], id=content['id'], model='model_8')\n\n return json.dumps({'answer': answer}), 200, {'ContentType': 'application/json'}\n\n\napp.run()\n" }, { "alpha_fraction": 0.4896851181983948, "alphanum_fraction": 0.69923996925354, "avg_line_length": 16.37735939025879, "blob_id": "81ddb9f642d9d695ed9bf43b5fff5783c6739743", "content_id": "03d6ccf45f4c93dcdf55232dfb14b4ecb7c492b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 921, "license_type": "no_license", "max_line_length": 30, "num_lines": 53, "path": "/requirements.txt", "repo_name": "gintautasp12/electra", "src_encoding": "UTF-8", "text": "absl-py==0.12.0\naccelerate==0.3.0\ncachetools==4.2.2\ncertifi==2020.12.5\nchardet==4.0.0\nclick==8.0.1\ncolorama==0.4.4\ndatasets==1.6.2\ndill==0.3.3\nfilelock==3.0.12\nFlask==2.0.1\nfsspec==2021.5.0\ngoogle-auth==1.30.1\ngoogle-auth-oauthlib==0.4.4\ngrpcio==1.38.0\nhuggingface-hub==0.0.8\nidna==2.10\nitsdangerous==2.0.1\nJinja2==3.0.1\njoblib==1.0.1\nMarkdown==3.3.4\nMarkupSafe==2.0.1\nmultiprocess==0.70.11.1\nnumpy==1.20.3\noauthlib==3.1.0\npackaging==20.9\npandas==1.2.4\nprotobuf==3.17.1\npyaml==20.4.0\npyarrow==4.0.0\npyasn1==0.4.8\npyasn1-modules==0.2.8\npyparsing==2.4.7\npython-dateutil==2.8.1\npytz==2021.1\nPyYAML==5.4.1\nregex==2021.4.4\nrequests==2.25.1\nrequests-oauthlib==1.3.0\nrsa==4.7.2\nsacremoses==0.0.45\nsix==1.16.0\ntensorboard==2.5.0\ntensorboard-data-server==0.6.1\ntensorboard-plugin-wit==1.8.0\ntokenizers==0.10.2\ntorch==1.8.1\ntqdm==4.60.0\ntransformers==4.6.1\ntyping-extensions==3.10.0.0\nurllib3==1.26.4\nWerkzeug==2.0.1\nxxhash==2.0.2\n" } ]
5
pariyani37/data_structures
https://github.com/pariyani37/data_structures
46332ccb4dece1c173af3d12185d5708f3571885
233614bd17a1b53edb03f5ee507033af0302c2fa
65afea638ac869330b16b12774a1c4687d4e1883
refs/heads/master
2020-03-24T11:41:12.539178
2018-08-12T18:55:21
2018-08-12T18:55:21
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6140350699424744, "alphanum_fraction": 0.6423751711845398, "avg_line_length": 26.44444465637207, "blob_id": "54dbabb1457d7be5b9419a4d3a6eaa6d8c73c13b", "content_id": "0ac1889cf315057f29d08bcc37259264a59ec44c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1482, "license_type": "no_license", "max_line_length": 73, "num_lines": 54, "path": "/driver.py", "repo_name": "pariyani37/data_structures", "src_encoding": "UTF-8", "text": "from data_structures import *\n\n# Code execution starts here\nif __name__=='__main__':\n \n # Start with the empty list\n llist = LinkedList()\n \n # Insert 6. So linked list becomes 6->None\n llist.append(6)\n \n # Insert 7 at the beginning. So linked list becomes 7->6->None\n llist.push(7);\n \n # Insert 1 at the beginning. So linked list becomes 1->7->6->None\n llist.push(6);\n \n # Insert 4 at the end. So linked list becomes 1->7->6->4->None\n llist.append(4)\n #llist.deleteNode(6)\n \n # Insert 8, after 7. So linked list becomes 1 -> 7-> 8-> 6-> 4-> None\n llist.insertAfter(llist.head.next, 8)\n\n cllist = CircularLinkedList()\n cllist.push(12)\n cllist.push(56)\n cllist.push(2)\n cllist.push(11)\n \n print \"Contents of circular Linked List\"\n cllist.printList()\n #print 'Created linked list is:',\n llist.printList()\n #print 'length is: ', llist.getCount()\n #print llist.search(8)\n #print llist.getNth(3)\n #print llist.freq(7)\n #print llist.detectLoop()\n llist.reverse()\n print('\\n')\n llist.printList()\n #print llist.countNodesinLoop()\n root = TreeNode(1)\n root.left = TreeNode(2)\n root.right = TreeNode(3)\n root.left.left = TreeNode(4)\n root.left.right = TreeNode(5)\n print \"Preorder traversal of binary tree is\"\n printPreorder(root)\n print \"\\nInorder traversal of binary tree is\"\n printInorder(root)\n print \"\\nPostorder traversal of binary tree is\"\nprintPostorder(root)\n" }, { "alpha_fraction": 0.544239342212677, "alphanum_fraction": 0.5503925085067749, "avg_line_length": 24.848901748657227, "blob_id": "a0a1d45b201092542424fac129c50209eb3fec2a", "content_id": "b8a7a1d3971b244b862b03d389c15f1af8d8c01b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9426, "license_type": "no_license", "max_line_length": 67, "num_lines": 364, "path": "/data_structures.py", "repo_name": "pariyani37/data_structures", "src_encoding": "UTF-8", "text": "# A complete working Python program to demonstrate all\n# insertion methods of linked list\n \n# Node class\nclass Node:\n \n # Function to initialise the node object\n def __init__(self, data):\n self.data = data # Assign data\n self.next = None # Initialize next as null\n \n \n# Linked List class contains a Node object\nclass LinkedList:\n \n # Function to initialize head\n def __init__(self):\n self.head = None\n \n \n # Functio to insert a new node at the beginning\n def push(self, new_data):\n \n # 1 & 2: Allocate the Node &\n # Put in the data\n new_node = Node(new_data)\n \n # 3. Make next of new Node as head\n new_node.next = self.head\n \n # 4. Move the head to point to new Node\n self.head = new_node\n \n \n # This function is in LinkedList class. Inserts a\n # new node after the given prev_node. This method is\n # defined inside LinkedList class shown above */\n def insertAfter(self, prev_node, new_data):\n \n # 1. check if the given prev_node exists\n if prev_node is None:\n print \"The given previous node must inLinkedList.\"\n return\n \n # 2. 
create new node &\n # Put in the data\n new_node = Node(new_data)\n \n # 4. Make next of new Node as next of prev_node\n new_node.next = prev_node.next\n \n # 5. make next of prev_node as new_node\n prev_node.next = new_node\n \n \n # This function is defined in Linked List class\n # Appends a new node at the end. This method is\n # defined inside LinkedList class shown above */\n def append(self, new_data):\n \n # 1. Create a new node\n # 2. Put in the data\n # 3. Set next as None\n new_node = Node(new_data)\n \n # 4. If the Linked List is empty, then make the\n # new node as head\n if self.head is None:\n self.head = new_node\n return\n \n # 5. Else traverse till the last node\n last = self.head\n while (last.next):\n last = last.next\n \n # 6. Change the next of last node\n last.next = new_node\n \n \n # Utility function to print the linked list\n def printList(self):\n temp = self.head\n while (temp):\n print temp.data,\n temp = temp.next\n\n\n def deleteNode(self, key):\n \n # Store head node\n temp = self.head\n \n # If head node itself holds the key to be deleted\n if (temp is not None):\n if (temp.data == key):\n self.head = temp.next\n temp = None\n return\n \n # Search for the key to be deleted, keep track of the\n # previous node as we need to change 'prev.next'\n while(temp is not None):\n if temp.data == key:\n break\n prev = temp\n temp = temp.next\n \n # if key was not present in linked list\n if(temp == None):\n return\n \n # Unlink the node from linked list\n prev.next = temp.next\n \n temp = None\n\n # This function counts number of nodes in Linked List\n # iteratively, given 'node' as starting node.\n def getCount(self):\n temp = self.head # Initialise temp\n count = 0 # Initialise count\n \n # Loop while end of linked list is not reached\n while (temp):\n count += 1\n temp = temp.next\n return count\n\n\n # This Function checks whether the value\n # x present in the linked list \n def search(self, x):\n # Initialize current to head\n current = self.head\n # loop till current not equal to None\n while current != None:\n if current.data == x:\n return True # data found\n current = current.next\n return False # Data Not found\n\n\n # Returns data at given index in linked list\n def getNth(self, index):\n current = self.head # Initialise temp\n self2=LinkedList()\n self2.head=current.next\n count = 0 # Index of current node\n if(count==index):\n return current.data\n return LinkedList.getNth(self2,index-1)\n\n def freq(self, search_for):\n current = self.head\n count = 0\n while(current is not None):\n if current.data == search_for:\n count += 1\n current = current.next\n return count\n\n def detectLoop(self):\n slow_p = self.head\n fast_p = self.head\n while(slow_p and fast_p and fast_p.next):\n slow_p = slow_p.next\n fast_p = fast_p.next.next\n if slow_p == fast_p:\n print \"Found Loop\"\n return\n \n\n def removeLoop(self, loop_node):\n # Set a pointer to the beginning of the linked \n # list and move it one by one to find the first\n # node which is part of the linked list\n ptr1 = self.head\n while(1):\n # Now start a pointer from loop_node and check\n # if it ever reaches ptr2\n ptr2 = loop_node\n while(ptr2.next!= loop_node and ptr2.next !=ptr1):\n ptr2 = ptr2.next\n \n # If ptr2 reached ptr1 then there is a loop.\n # So break the loop\n if ptr2.next == ptr1 : \n break\n \n ptr1 = ptr1.next\n\n def countNodes(self):\n res=1\n current=self.head\n while(current!=current.next):\n res=res+1\n current=current.next\n return res\n def countNodesinLoop(self):\n slow_p = 
self.head\n fast_p = self.head\n while(slow_p and fast_p and fast_p.next):\n slow_p = slow_p.next\n fast_p = fast_p.next.next\n if slow_p == fast_p:\n self2=LinkedList(slow_p)\n return countNodes(self2)\n def reverse(self):\n prev = None\n current = self.head\n while(current is not None):\n next = current.next\n current.next = prev\n prev = current\n current = next\n self.head = prev\n\n \nclass CircularLinkedList:\n \n # Constructor to create a empty circular linked list\n def __init__(self):\n self.head = None\n \n # Function to insert a node at the beginning of a\n # circular linked list\n def push(self, data):\n ptr1 = Node(data)\n temp = self.head\n \n ptr1.next = self.head\n \n # If linked list is not None then set the next of\n # last node\n if self.head is not None:\n while(temp.next != self.head):\n temp = temp.next\n temp.next = ptr1\n \n else:\n ptr1.next = ptr1 # For the first node\n \n self.head = ptr1 \n \n # Function to print nodes in a given circular linked list\n def printList(self):\n temp = self.head\n if self.head is not None:\n while(True):\n print \"%d\" %(temp.data),\n temp = temp.next\n if (temp == self.head):\n break\n\n\nclass Stack:\n def __init__(self):\n self.items = []\n\n def isEmpty(self):\n return self.items == []\n\n def push(self, item):\n self.items.append(item)\n\n def pop(self):\n return self.items.pop()\n\n def peek(self):\n return self.items[len(self.items)-1]\n\n def size(self):\n return len(self.items)\n\nclass TreeNode:\n def __init__(self,key):\n self.left = None\n self.right = None\n self.val = key\n\n#Tree Traversal\n\ndef printInorder(root):\n \n if root:\n \n # First recur on left child\n printInorder(root.left)\n \n # then print the data of node\n print(root.val),\n \n # now recur on right child\n printInorder(root.right)\n \n \n \n# A function to do postorder tree traversal\ndef printPostorder(root):\n \n if root:\n \n # First recur on left child\n printPostorder(root.left)\n \n # the recur on right child\n printPostorder(root.right)\n \n # now print the data of node\n print(root.val),\n \n \n# A function to do postorder tree traversal\ndef printPreorder(root):\n if root:\n \n # First print the data of node\n print(root.val),\n \n # Then recur on left child\n printPreorder(root.left)\n \n # Finally recur on right child\n printPreorder(root.right)\n\n# Recursive Python program for level order traversal of Binary Tree\n \n\n# Function to print level order traversal of tree\ndef printLevelOrder(root):\n h = height(root)\n for i in range(1, h+1):\n printGivenLevel(root, i)\n \n \n# Print nodes at a given level\ndef printGivenLevel(root , level):\n if root is None:\n return\n if level == 1:\n print \"%d\" %(root.data),\n elif level > 1 :\n printGivenLevel(root.left , level-1)\n printGivenLevel(root.right , level-1)\n \n \n\"\"\" Compute the height of a tree--the number of nodes\n along the longest path from the root node down to\n the farthest leaf node\n\"\"\"\ndef height(node):\n if node is None:\n return 0\n else :\n # Compute the height of each subtree \n lheight = height(node.left)\n rheight = height(node.right)\n \n #Use the larger one\n if lheight > rheight :\n return lheight+1\n else:\n return rheight+1\n \n\n \n\n\n \n \n" }, { "alpha_fraction": 0.8466257452964783, "alphanum_fraction": 0.8466257452964783, "avg_line_length": 53.66666793823242, "blob_id": "d004bb4b7676641fb1fb59d36760b8cb75939eea", "content_id": "a6e7eb5f6350ace2bb65695bf8c5e5d22e9154cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Markdown", "length_bytes": 163, "license_type": "no_license", "max_line_length": 84, "num_lines": 3, "path": "/readme.md", "repo_name": "pariyani37/data_structures", "src_encoding": "UTF-8", "text": "A basic python implementation of basic data structures used in computer programming.\n\ndriver.py contains the implementation code for the library data_structures.py" } ]
3
wuyan0714/math_homework
https://github.com/wuyan0714/math_homework
a6065910e69cb5526248d593c165f81002d25d12
eac8dc21995ed634ff7a4bfef356ccd267c1760a
76d7900ced67f1c0ac118004b9aff8bdcee45eae
refs/heads/master
2020-09-16T21:36:51.257410
2019-11-25T08:12:42
2019-11-25T08:12:42
223,894,405
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.4320261478424072, "alphanum_fraction": 0.45686274766921997, "avg_line_length": 24.516666412353516, "blob_id": "ebf37b3f98cc0d5700ade881ca1834085fe53c91", "content_id": "45a55680084f0acbc323c8f35c8232e6a3de0710", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1668, "license_type": "no_license", "max_line_length": 63, "num_lines": 60, "path": "/Coplete_principal_elimination.py", "repo_name": "wuyan0714/math_homework", "src_encoding": "UTF-8", "text": "'''完全主元素消去法'''\nimport numpy as np\n#按列选主元\ndef max(a,k):\n i ,j = np.where(np.abs(a[k:,k:]==np.max(np.abs(a[k:,k:]))))\n ik = i[0]+k\n jk = j[0]+k\n return ik,jk\n#第k+1次对第i+1行进行消元\ndef elimination_line(a,b,k,i):\n m = a[i][k]/a[k][k]\n for j in range(k+1,len(a)):\n a[i][j] = a[i][j] - m*a[k][j]\n a[i][k] = 0\n b[i] = b[i]-m*b[k]\n return a,b\n#对于n*n阶矩阵,需要进行n-1次迭代,每次需要从第k+1行到第n行进行消元\ndef elimination(a,b):\n IZ = np.arange(len(a))\n for k in range(0,len(a)-1):\n ik,jk = max(a,k)\n if a[ik,jk]==0:\n print('此矩阵非奇异')\n return\n if ik!=k:\n a[[ik, k], :] = a[[k, ik], :]\n b[[ik,k]] = b[[k,ik]]\n if jk!=k:\n a[:,[jk,k]] = a[:,[k,jk]]\n IZ[[jk, k]] = IZ[[k, jk]]\n for i in range(k+1,len(a)):\n elimination_line(a,b,k,i)\n return a,b,IZ\ndef back_to_generation(a,b,IZ):\n y = np.zeros(len(a))\n y[-1] = b[-1]/a[-1][-1]\n for i in range(len(a)-2,-1,-1):\n t = 0\n for j in range(i+1,len(a)):\n t = t+a[i][j]*y[j]\n y[i] = (b[i]-t)/a[i][i]\n IZ_y = np.vstack((IZ, y))\n IZ_y = IZ_y.T\n IZ_y= np.sort(IZ_y,axis=0)\n x = IZ_y[:,1]\n return x\n#对a,b进行消元计算与回代计算\ndef gauss_elimination(a,b):\n\n\n a,b,IZ= elimination(a,b)\n x = back_to_generation(a,b,IZ)\n return x\ndef test():\n a = np.array([[1, 2, 3], [2, 5, 2], [3, 1, 5]],dtype=float)\n b = np.array([14, 18, 20],dtype=float)\n x = gauss_elimination(a, b)\n print(x)\nif __name__ == '__main__':\n test()" }, { "alpha_fraction": 0.3765822649002075, "alphanum_fraction": 0.42009493708610535, "avg_line_length": 24.816326141357422, "blob_id": "a1fbf2aad02bb2f434909e6709f743fc95a14af4", "content_id": "a44336f75a0ddfb67eb8566eef6a9390ad112fa6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1276, "license_type": "no_license", "max_line_length": 64, "num_lines": 49, "path": "/Triangulation.py", "repo_name": "wuyan0714/math_homework", "src_encoding": "UTF-8", "text": "'''直接三角分解'''\nimport numpy as np\ndef triangle(a):\n u = np.zeros_like(a)\n l = np.zeros_like(a)\n for i in range(len(a)):\n u[0, i] = a[0,i]\n l[i,i] = 1\n if i>0:\n l[i,0] = a[i,0]/u[0,0]\n for r in range(1,len(a)):\n for i in range(r,len(a)):\n t1=0\n for k in range(r):\n t1=t1+l[r,k]*u[k,i]\n u[r,i] = a[r,i]-t1\n t2=0\n for k in range(r):\n t2=t2+l[i,k]*u[k,r]\n l[i,r] = (a[i,r]-t2)/u[r,r]\n return l,u\ndef back_to_generation(b,l,u):\n x = np.zeros_like(b)\n y = np.zeros_like(b)\n y[0] = b[0]\n for i in range(1,len(b)):\n t1 = 0\n for k in range(i):\n t1 = t1 + l[i,k]*y[k]\n y[i] = b[i]-t1\n x[-1] = y[-1]/u[-1,-1]\n for i in range(len(b)-1,-1,-1):\n t2 = 0\n for k in range(i+1,len(b)):\n t2 = t2 + u[i,k]*x[k]\n x[i] =(y[i]-t2)/u[i,i]\n return x\ndef triangulation(a,b):\n l,u = triangle(a)\n print(l,u)\n x = back_to_generation(b,l,u)\n return x\ndef test():\n a = np.array([[1, 2, 3], [2, 5, 2], [3, 1, 5]], dtype=float)\n b = np.array([14, 18, 20], dtype=float)\n x = triangulation(a,b)\n print(x)\nif __name__ == '__main__':\n test()" }, { "alpha_fraction": 0.44539782404899597, "alphanum_fraction": 0.4726988971233368, 
"avg_line_length": 24.65999984741211, "blob_id": "8da69beae5c50226a65ac5b06df0e45d851044c7", "content_id": "2aa79ed7b7fc7ea08733db6dc4440a50b2aa82fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1416, "license_type": "no_license", "max_line_length": 55, "num_lines": 50, "path": "/Column_principal_elimination.py", "repo_name": "wuyan0714/math_homework", "src_encoding": "UTF-8", "text": "'''列主元消去法'''\nimport numpy as np\n#按列选主元\ndef max(a,k):\n i = k + np.argmax(np.abs(a[k:, k]))\n return i\n#第k+1次对第i+1行进行消元\ndef elimination_line(a,b,k,i):\n m = a[i][k]/a[k][k]\n for j in range(k+1,len(a)):\n a[i][j] = a[i][j] - m*a[k][j]\n a[i][k] = 0\n b[i] = b[i]-m*b[k]\n return a,b\n#对于n*n阶矩阵,需要进行n-1次迭代,每次需要从第k+1行到第n行进行消元\ndef elimination(a,b):\n for k in range(0,len(a)-1):\n ik = max(a,k)\n if a[ik,k]==0:\n print('此矩阵非奇异')\n return\n if ik!=k:\n a[[ik, k], :] = a[[k, ik], :]\n b[[ik, k]] = b[[k, ik]]\n print(a,b)\n for i in range(k+1,len(a)):\n elimination_line(a,b,k,i)\n print(a,b)\n return a,b\ndef back_to_generation(a,b,x):\n x[len(a)-1] = b[-1]/a[-1][-1]\n for i in range(len(a)-2,-1,-1):\n t = 0\n for j in range(i+1,len(a)):\n t = t+a[i][j]*x[j]\n x[i] = (b[i]-t)/a[i][i]\n return x\n#对a,b进行消元计算与回代计算\ndef gauss_elimination(a,b):\n x = [0 for i in range(len(a))]\n a,b = elimination(a,b)\n x = back_to_generation(a,b,x)\n return x\ndef test():\n a = np.array([[1,2,3],[2,5,2],[3,1,5]],dtype=float)\n b = np.array([14,18,20],dtype=float)\n x = gauss_elimination(a,b)\n print(x)\nif __name__ == '__main__':\n test()" } ]
3
MonkeyGone2Heaven/SCNI_Toolbar
https://github.com/MonkeyGone2Heaven/SCNI_Toolbar
be927238c5879281f84cfb38689b22fad9b1803a
4ce9d00638a188c66c06bec2d33563735f49a9c9
058c7be09032cdfbd1a939ab49ccd389c349c124
refs/heads/master
2020-12-02T12:44:08.015642
2019-06-27T19:18:20
2019-06-27T19:18:20
96,582,401
5
4
null
null
null
null
null
[ { "alpha_fraction": 0.6938083171844482, "alphanum_fraction": 0.6972010135650635, "avg_line_length": 40.35087585449219, "blob_id": "cd4199525cdb0a8c7ba462057708dd0cd0319377", "content_id": "17552b5d7177c160d3b739cbaf78e3ae6b804450", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2358, "license_type": "no_license", "max_line_length": 138, "num_lines": 57, "path": "/SCNI_Documents/SpikeSortGuide.md", "repo_name": "MonkeyGone2Heaven/SCNI_Toolbar", "src_encoding": "UTF-8", "text": "# SCNI Guide to WaveClus Spike Sorting on the NIH HPC Cluster\n\n## 1. Accessing Felix and Biowulf \n\n### Before you begin\n* Apply for NIH Biowulf and Helix HPC accounts: https://hpc.nih.gov/docs/accounts.html \n* Sign up to GitHub https://github.com/\n* Contact the SCNI admin and request to be added to:\n 1) the NIF group's Helix directory\n 2) the NIF GitHub user group: https://github.com/nimh-nif\n\n\n### Using NoMachine\n\n* Install NoMachine from the web: https://www.nomachine.com/\n* Setup an SSH connection to host: felix.nimh.nih.gov, port 22 (https://hpc.nih.gov/docs/connect.html)\n* Connect to Felix\n* In Felix, open a terminal window: Applications > System Tools > Terminal\n* Navigate to the NIF group's directory, create a folder for yourself and clone the github repository 'SortSpikes'\n\n::\n\n cd /data/NIF/projects # change directory\n mkdir leathersml # create a new directory with your username\n chmod 755 -R /leathersml # update permissions for the new directory\n cd leathersml # change directory\n git clone https://github.com/nimh-nif/SortSpikes.git # clone the SortSpikes git repository to your directory\n\n* Open Matlab\n\n::\n\n module load matlab\n matlab &\n\n### Mounting Helix to the desktop\n\n* Full instructions for all operating systems are providede here: https://hpc.nih.gov/docs/transfer.html\n* On OSX, open a Finder window. From the menu at the top of the screen select Go > Connect to Server\n* Enter the server address: smb://helixdrive.nih.gov/NIF and click connect\n* Enter your NIH username and password\n* The NIF group's Helix directory is now mapped to your local Mac as a volume.\n\n* Open an OSX Terminal window (e.g. 
click the magnifying glass in the top right corner of the desktop and type 'terminal')\n* Open an SSH connection to Felix:\n\n::\n\n ssh [email protected]\n \n\n### Modifying SortSpikes Matlab code\n\n* To allow individual users to customize parameters for sorting their data, each user should create their own copy of the following files:\n * Copy BatchNeuroScript_APM.m, save and rename with own initials\n * Copy set_waveclus_handles_APM.m, save and rename with own initials\n * Edit PreprocessNeuroData.m to include new user in the switch statement\n\n" }, { "alpha_fraction": 0.7577683329582214, "alphanum_fraction": 0.7888417840003967, "avg_line_length": 63.318180084228516, "blob_id": "cd655c0d86905063e76d4701af5295dbe3c15075", "content_id": "4ad459b05be021ebf4f6d3f5aaf8cfa22b56fc20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1416, "license_type": "no_license", "max_line_length": 534, "num_lines": 22, "path": "/README.md", "repo_name": "MonkeyGone2Heaven/SCNI_Toolbar", "src_encoding": "UTF-8", "text": "<img src=\"https://github.com/MonkeyGone2Heaven/SCNI_Toolbar/blob/master/SCNI_Subfunctions/Icons/Systems/Logo_SCNI_Toolbar.png\" alt=\"SCNI_Toolbar\" width=\"150\" height=\"75\"/> \n\nThe SCNI Toolbar is a library of Matlab / GNU Octave functions and graphical user interfaces (GUIs) that provide behavioural neurocientists with a simple and intuitive method of loading, editing, and saving parameters for a variety of systems involved in visual neurophysiology and neuroimaging experiments. More specifically, the toolbar is intended to be used in conjunction with experiments programmed using PsychToolbox extensions in Matlab/ GNU Octave (optionally under the PLDAPS framework). Currently supported systems include:\n\nVisual stimulus presentation:\n\n* [PsychToolbox](http://psychtoolbox.org/)\n* [VPixx, DataPixx2](http://vpixx.com/products/tools-for-vision-sciences/display-drivers/datapixx2/)\n* [PLDAPS](https://github.com/HukLab/PLDAPS)\n\nNeurophysiology recording:\n\n* [Tucker Davis Technologies, OpenEx](http://www.tdt.com/openex.html)\n* [Tucker Davis Technologies, Synapse](http://www.tdt.com/Synapse/index.html)\n* [Open Ephys](http://www.open-ephys.org/)\n\nBehavioural control:\n\n* [SR Research, EyeLink](http://www.sr-research.com/)\n* [Arrington Research, ViewPoint](http://www.arringtonresearch.com/)\n\n![SCNI Toolbar](https://user-images.githubusercontent.com/7523776/41602802-9d710d1e-73a9-11e8-9120-f2f8c3ab6e16.png)\n\n" }, { "alpha_fraction": 0.7149309515953064, "alphanum_fraction": 0.7845377326011658, "avg_line_length": 113.09091186523438, "blob_id": "d18531e4373ba6acf12972a13ac518de0c8ca552", "content_id": "211e1bf70fd8721b13e996de4a7200ef7918615d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3764, "license_type": "no_license", "max_line_length": 692, "num_lines": 33, "path": "/SCNI_Documents/EyeTrackerGoggles.md", "repo_name": "MonkeyGone2Heaven/SCNI_Toolbar", "src_encoding": "UTF-8", "text": "{\\rtf1\\ansi\\ansicpg1252\\cocoartf1504\\cocoasubrtf830\n{\\fonttbl\\f0\\fswiss\\fcharset0 Helvetica;}\n{\\colortbl;\\red255\\green255\\blue255;}\n{\\*\\expandedcolortbl;;}\n\\margl1440\\margr1440\\vieww10800\\viewh8400\\viewkind0\n\\pard\\tx720\\tx1440\\tx2160\\tx2880\\tx3600\\tx4320\\tx5040\\tx5760\\tx6480\\tx7200\\tx7920\\tx8640\\pardirnatural\\partightenfactor0\n\n\\f0\\fs24 \\cf0 The SCNI uses EyeLink II video-based eye tracking systems from [SR 
Research](http://www.sr-research.com/EL_II.html). While these systems are now over a decade old, this earlier 'primate mount' design is the only model available that uses light-weight cameras that can be mounted close to the subject, or even head mounted. The SCNI also uses 55\" LG 4K OLED televisions with passive 3D for visual stimulation. In order to display stimuli stereoscopically, polarizing filters must be held in front of each of the subject's eyes. To achieve both of these goals, we designed a simple pair of goggles that can be positioned in front of a head-fixed animal during vision experiments.\\\n\\\n![Goggles](https://user-images.githubusercontent.com/7523776/31058012-c1d97f7c-a6ba-11e7-82cc-edae84d1ec9e.jpg)\\\n\\\n## Parts\\\n\\\nFor the initial design, we used the following equipment to suspend the goggles in front of the animal by mounting it to the animal's chair:\\\n* Manfrotto 244N variable-friction magic arm ([B&H Photo](https://www.bhphotovideo.com/c/product/325444-REG/Manfrotto_244N_244N_Variable_Friction_Magic.html))\\\n* Manfrotto 035RL super clamp ([B&H Photo](https://www.bhphotovideo.com/c/product/546356-REG/Manfrotto_035RL_035RL_Super_Clamp_with.html))\\\n\\\nThe following parts were ordered:\\\n* Nylon threaded rod, 8-32 ([McMaster-Carr](https://www.mcmaster.com/#standard-threaded-rods/=19kw5yo))\\\n* Nylon 8-32 hex nuts ([McMaster-Carr](https://www.mcmaster.com/#94812a400/=18o12ll))\\\n* Steel socket cap screw, 3/8\"-16 thread size, 1\" long ([McMaster-Carr](https://www.mcmaster.com/#92196a624/=19c8gb5))\\\n* 1/4\"-20 hex nuts ([McMaster-Carr](https://www.mcmaster.com/#92673a113/=19kwa98))\\\n* 1/4\"-20 slotted tripod screws, 12mm long ([B&H Photo](https://www.bhphotovideo.com/c/product/1049142-REG/desmond_5_sach14_knurled_1_4_20_slotted_screws.html))\\\n* High-performance hot mirror, 45\\'b0 AOI, 101 x 127mm ([Edmund Optics](https://www.edmundoptics.com/optics/optical-mirrors/hot-cold-mirrors/45deg-aoi-101-x-127mm-hot-mirror/))\\\n\\\nWe removed the polarizing filters from pairs of LG AG-F310 passive 3D glasses that came with the TVs (although other brands of passive 3D glasses with circular polarization could also work). Alternatively, if you want to present stereoscopic stimulation but do not have a passive 3D display then you could use anaglyph color filters instead.\\\n\\\n## Construction\\\n\\\nThe main part of the goggles is 3D printed. We initially used the SCNI's Form2 from FormLabs, using the black V2 resin. The geometry of the part is robust enough for this material to work, although it is relatively brittle and will not tolerate being dropped or bumped very well. For future, head-mounted iterations we will therefore explore the use of 3D printed carbon-PEEK, for added strength and potentially reduced weight.\\\n\\\n## Towards a head-mounted design\\\nThe current design weighs a total of XXXg (including cameras, excluding clamp and arm), making it too heavy for sustained use as a head-mounted device. Since the hot mirror and cameras contribute the majority of this weight, the ideal solution will be to use much smaller cameras that image the eye directly, eliminating the need for a mirror. This design is popular amongst commercial human head-mounted eye-tracking devices (e.g. ), but typically delivers lower frame rates (~60Hz vs 500Hz for EyeLink II) and requires customization for use in non-human subjects. 
}" }, { "alpha_fraction": 0.739506185054779, "alphanum_fraction": 0.786831259727478, "avg_line_length": 122.56497192382812, "blob_id": "d938fc7928e5446cee42bd66b46369a2b373e13e", "content_id": "976f2671003d40c867888daa3349a459356d4c0f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 21870, "license_type": "no_license", "max_line_length": 1009, "num_lines": 177, "path": "/SCNI_Documents/NIFStereoProjectorGuide.md", "repo_name": "MonkeyGone2Heaven/SCNI_Toolbar", "src_encoding": "UTF-8", "text": "{\\rtf1\\ansi\\ansicpg1252\\cocoartf1504\\cocoasubrtf830\n{\\fonttbl\\f0\\fswiss\\fcharset0 Helvetica;}\n{\\colortbl;\\red255\\green255\\blue255;}\n{\\*\\expandedcolortbl;;}\n\\margl1440\\margr1440\\vieww10800\\viewh8400\\viewkind0\n\\pard\\tx720\\tx1440\\tx2160\\tx2880\\tx3600\\tx4320\\tx5040\\tx5760\\tx6480\\tx7200\\tx7920\\tx8640\\pardirnatural\\partightenfactor0\n\n\\f0\\fs24 \\cf0 # NIF Stereo Projector Calibration Guide\\\n(Written by Aidan Murphy, March 2014)\\\n\\\n## 1.0. Introduction to the Stereo Projection System\\\n\\\nThe NIF\\'92s stereo projection system is designed for presentation of stereoscopic stimuli. It was initially intended for use in binocular rivalry experiments (presentation of completely different images to each eye, which cannot be binocularly fused) but can potentially be used for 3D binocular disparity experiments (slightly shifted images in each eye, which can be fused) provided it is carefully calibrated. Although each projector can be used independently for regular stimulus presentation, the fact that the system is intended for stereo presentation introduces several caveats:\\\n\\\n**1.\tBoth projectors must always be calibrated together, even if you only intend to use one of them.**\\\nThe reason for this is that any changes made to one projector will affect those variables relative to those of the other projector, and will thus be disruptive to any users conducting stereo experiments. For the purpose of any experiment involving stereoscopic presentation, matched calibration of various projector parameters is essential. Therefore, unless there is facility-wide consensus that the stereoscopic presentation capabilities are not being utilized, users should always calibrate both of the stereo projectors.\\\n\\\n**2.\tExperiments where color perception (and thus color calibration) is important cannot be performed.**\\\nThis is because the stereo projectors are fitted with INFITEC interference filters (Jorke and Fritz, 2003). Each projector\\'92s interference filter allows three peak wavelengths of the visible spectrum to pass through it, corresponding to the tuning of primate cone receptors (RGB). Stereoscopic presentation is achieved because the peaks for each projector are offset (see figure 1), thus minimizing cross-talk. When viewed with both eyes through the corresponding A and B filters, full color images can be presented stereoscopically. 
However, when viewed through just one filter, the left eye image (top projector) appears slightly greenish and the right eye image (bottom projector) appears slightly reddish, due to the proximity of each band-pass peak with the corresponding peak sensitivity of cone receptors in the primate retina.\\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132144-66cb322a-931d-11e7-8ddc-15ef55d25df5.png\" alt=\"INFITEC\" width=\"400\"> <figcaption><b>Figure 1.</b> Spectrum of a broadband thermal light source, (a) without, (b) with type A, and (c) with type B INFITEC filters.</figcaption>\\\n\\\n\\\n### 1.1.\tMotivation: Why calibrate?\\\n\\\nCalibration of the projection image size, position, shape, focus, and luminance are important for any visual experiment. By ignoring basic properties of the visual stimulation used during your imaging experiment, you will at best appear neglectful and unmethodical to reviewers, while at worst you will unwittingly introduce biases into you results. It is therefore the responsibility of individual experimenters to ensure that these features are correctly calibrated for each session. The following procedures are intended to make checking and adjusting projector calibration fast and easy, thus encouraging the maintenance of the projector system and benefiting all users. \\\n\\\n### 1.2. \tHardware Overview\\\n\\\nThis section briefly describes the projection and measurement equipment required for calibration of the projector systems in the NIF. PDFs of the equipment manuals should be located in the same directory as this document.\\\n\\\n* **Navitar 2.75\\'94-5.0\\'94 NuView Zoom lenses**\\\nThe Epson projectors are located approximately 3.05 m from the rear projection screen inside the bore of the scanner. The scanner bore is 0.4m in diameter, and the area visible to an animal inside the bore is further restricted by the headpost. Consequently, the projected image needs to be considerably smaller than would be typical \\\n\\\n* **Neutral density filters**\\\nThe bright images projected by the Epsons are intended for large screens. Here, we instead focus the light onto a small area on the screen inside the bore, resulting in luminance intensities exceeding the limits of our photometer\\'92s measurement capabilities (>1000cd/m2)! To compensate for this, we have attached neutral density (ND) filters, which are designed to attenuate luminance linearly across the spectrum. The strengths of neutral density filter available in the NIF are 0.9 ND (12.5% transmittance) and 0.6 ND (25% transmittance). The filters are fitted on the outside of the Navitar lens using custom-made plastic filter holders.\\\n\\\n* **INFITEC filters**\\\nIn order to view the different interference filtered projector images in each eye, the viewer must wear a pair of INFITEC filter glasses. The curved glass filter lenses from three pairs of INFITEC Plus glasses have been removed and fitted into custom designed equipment:\\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132161-765d8abc-931d-11e7-9294-c299f86136a5.png\" alt=\"INFITEC glasses\" width=\"400\">\\\n\\\n* **Binocular monkey goggles**\\\nThe binocular goggles attach to the front of the monkey chair, the same way as the regular mirror, and holds the components necessary for the monkey to view the screen inside the bore. 
A first surface mirror ([Edmund Optics](https://www.edmundoptics.com/optics/optical-mirrors/flat-mirrors/First-Surface-Mirrors/)) with adjustable angle reflects light from the rear-projected image on the screen, towards the monkey\\'92s eyes. The light passes through the two INFITEC filters, and is split by a central septum that separates the two channels. Finally, it passes through of a pair of hot mirrors angled at 45 to the line of sight ([Edmund Optics](https://www.edmundoptics.com/optics/optical-mirrors/specialty-mirrors/high-performance-hot-mirrors/)), and into the monkey\\'92s eyes. The hot mirrors reflect infrared light from IR diodes located at the sides, off the eye, and back into the MR compatible camera(s) ([MRC](http://www.mrc-systems.de/en/products/mr-compatible-cameras)), also located at the sides. \\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132148-6c71a1f0-931d-11e7-8994-6a7674c8a184.png\" alt=\"Binocular goggles\" width=\"400\"> <figcaption>A 3D render of the .stl file for 3D-printed binocular goggles for macaque fMRI.</figcaption>\\\n\\\n* **Luminance calibration filters**\\\nIn order to measure the luminance of the projected images as they will appear to the monkey, luminance measurements should be made through the INFITEC filters, and with both projectors turned on simultaneously. For this purpose, one of each filter has been fitted into holders that slot between the luminance probe and the test screen (Figure 3).\\\n\\\n* **Spatial calibration binoculars**\\\nIn order for users to check the spatial alignment of the projectors, one pair of INFITEC Plus glasses have been kept intact. These can be used alone, or in combination with a pair of binoculars, to view the projected images on the screen inside the bore.\\\n\\\n* **Photometer: Konica-Minolta CA-210 Color Analyzer**\\\nCA-210 Universal measuring probe\\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132159-73848d7c-931d-11e7-930a-20da32b35eda.png\" alt=\"Konica-Minolta CA-210\" width=\"400\"> <figcaption><b>Figure 2.</b> Konica-Minolta CA-210 Color Analyzer.</figcaption>\\\n\\\n* **Distance measurement: Bosch DLR130**\\\nThe Bosch DLR130 is a laser-based distance measurement device. It can be used to measure the exact distance from the projector to the screen (approximately 305cm), and then to place the test screen at the same distance, in order to make accurate luminance measurements outside of the magnet.\\\n\\\n### 1.3.\tConnections\\\n* **Video Connections**\\\nSince the Epson projectors are located inside the RF shielded MR environment, they receive their DVI-D inputs through a pair of 40ft optical-fiber DVI cables, which run through the wave guide. At each end of each cable is an active (+5V DC) transmitter/ receiver that converts between the optical signal and the electrical DVI-D signal. This also reduces signal attenuation over the distance from the server room to the projectors.\\\nThe source of the projector signal is the Nvidia GeForce GFX 650Ti graphics board installed in the Stereo_Stim workstation. Specifically, HDMI and/or DVI outputs from the graphics card are sent to a pair of StarTech ST122DVIA active DVI-splitters (each requiring 9V DC input), which split each output between one projector and one console room display. \\\n\\\n* **Network Connections**\\\nThe Epson projectors feature browser-based control of basic remote functions (e.g. power on/ off). 
To allow these remote functions to be performed from the control room, each Epson projector must be connected to the network via the Ethernet port.\\\n\\\n* **Serial Connections**\\\nSee section \\'913.2. Serial Connection\\'92 below for information on serial connections between the photometer and the luminance measurement PC.\\\n\\\n### 1.4. Calibration setup\\\nThe strong magnetic field (4.7T) and narrow bore (40cm diameter) of the NIF\\'92s Bruker pose some difficulty for calibration of projected images on the screen inside. For this reason, an alternative setup has been devised, in which the projection is reflected twice in order to project the images to a screen outside the bore where measurements can be more easily made and more closely inspected (see figure X). \\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132167-7b01dc8a-931d-11e7-9d17-7b13ef8f75b7.png\" alt=\"Probe holder\" width=\"400\"> <figcaption><b>Figure 3.</b> Probe holder. The probe is positioned with the lens 25mm away from the center of the circular projection screen. The slot between the probe and the screen allows luminance measurements to be made through each of the INFITEC filters.</figcaption>\\\n\\\n-----\\\n\\\n## 2.0. Spatial calibration\\\n\\\nThe following procedure for calibration is based on the fact that the projection image is far easier to assess outside of the bore. However, there is no substitute for assessment of image quality inside the bore and by examining the behavioral responses of your monkey. Recreating experimental conditions outside of the bore for calibration requires careful replication of the same projection distance and ambient lighting. To this end, the distance between the projectors and test screen should be fixed at the same distance as the projector frame and bore screen. See ProjectorGeometry.pdf for the approximate geometry.\\\n\\\n### 2.1. Distance measurement\\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132173-7f044282-931d-11e7-8233-d474ab53f470.png\" alt=\"Bosch DLR130\" width=\"400\"> <figcaption><b>Figure 4.</b> Bosch DLR130, with On/Off button highlighted in red and reference position (front or rear edge) display and alternation button highlighted in green.</figcaption>\\\n\\\n1.\tThe bore covers should initially be open, the mirror above the bore should be in position and the primary screen should be in place inside the bore. \\\n2.\tTurn on the Bosch DLR130, by pressing the red power button (figure 4). The symbol on the right of the LCD screen displays the reference point that is currently selected. If necessary, switch the reference point to the top edge of the device by pressing the bottom left button on the keypad.\\\n3.\tHold the Bosch DLR130 next to the Epson projector lens, with the front edge of the DLR130 approximately level with the lens. Press the large red button on the DLR130. The laser should appear.\\\n4.\tMake sure that the laser point is shining on the screen inside the bore. Press the large red button on the DLR130 again. There should be a clicking sound and the distance will appear on the display. You can make repeated readings, or a reading from each projector lens to be sure before making a note of the projection distance.\\\n5.\tTurn on both projectors and move the upper mirror to a (approximately) 45 position, so that both images project to the appropriate location on the screen inside the bore. Then close the covers to the bore and place the second mirror on the cover. 
Move the mirror into a position so that both projected images are reflected in the direction towards the control room.\\\n6.\tMove the tripod with the calibration screen and photometer probe mounted on it so that the projected images appear on the screen. Adjust the distance of the screen until the images appear roughly focused. \\\n7.\tRepeat distance measurement using the DLR130, with the laser pointer reflecting off both mirrors and landing on the screen. Adjust the distance of the tripod supporting the external screen from the mirrors until the measured projection distance matches the distance measured inside the bore.\\\n\\\n### 2.2. Projector image adjustment\\\nTo turn on the Epson projectors, press the power button on the back of the projector once (Figure 2). To turn off, press the power button twice. The normal projection setting of the projectors (Menu > Extended > Projection) is for \\'91Rear/Ceiling\\'92, which means rear-projection and top-bottom inversion (as though ceiling- mounted upside down) to compensate for the dual mirrors in the path from the lens to the monkey\\'92s eyes. If you need to read the projector menu above the bore then you may need to temporarily restore this setting to \\'91Front\\'92 so that menu text appears correctly (see figure 5).\\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132184-86a7db8e-931d-11e7-9ff0-c839529f8acc.png\" alt=\"Epson menu\" width=\"400\"> <figcaption><b>Figure 5.</b> Epson menu display. The default setting for the NIF stereo projectors mirror-inverts the projected image twice (once in each dimension) so this must be temporarily reset in order to read the menu text.</figcaption>\\\n\\\n**2.2.1. Focus**\\\nThe focus of the projected image is dependent on two main factors: the focus setting of the lens, and the projection distance. Assuming you have set the correct projection distance (confirmed using the Bosch DLR130), adjust the focus of the projected image by rotating the focus ring on the lens (outer barrel) until the projected image appears sharp and text is readable.\\\n\\\n**2.2.2. Size**\\\nThe size of the projected image is dependent on two main factors: the zoom of the lens, and the projection distance. Assuming you have set the correct projection distance, adjust the size of the projected image by rotating the zoom ring on the lens.\\\n\\\n**2.2.3. Aspect ratio**\\\nFor simplicity, the aspect ratio of the projected image should automatically match the aspect ratio of the display resolution (as set in the NVidia control panel). This way, the number of pixels per degree of visual angle will be the same in both vertical and horizontal directions, thus avoiding complications when calculating the dimensions, position, speed and acceleration of image elements in your experimental code. \\\n\\'95\tPress the \\'91Menu\\'92 button, select \\'91Aspect\\'92 and press \\'91Enter\\'92.\\\n\\'95\tSelect \\'91Auto\\'92 and press \\'91Enter\\'92. This option automatically sets the aspect ratio according to the input signal.\\\n\\\n**2.2.4. Position**\\\nThe two projected images from the Epson projectors should converge in the plane of the screen. Adjust the mirror(s) so that at least one of the projected images appears in the correct position on the calibration screen.\\\n\\'95\tFrom MATLAB, call the function DisplayGrid.m. A white grid will be displayed on a black background, with a red crosshair at the center of each image. 
\\\n\\'95\tFirst align the centers of the two images, since this is the most important part of the image in most experiments. Adjust the vertical alignment of the projectors using the screw on the back of the projector stand or the vertical lens shift dial on the top of the upper projector. \\\n\\'95\tTo adjust the horizontal alignment, use the horizontal lens shift dial on the top of the upper projector (the dial on the lower projector is unlikely to be accessible). \\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132188-8bf84952-931d-11e7-86a3-f5db6ebcc235.png\" alt=\"Image shift\" width=\"400\">\\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132191-8f53d170-931d-11e7-882e-99494bb4a7cc.png\" alt=\"Calibration grid\" width=\"400\">\\\n\\\n\\\n**2.2.5. Image shape**\\\nSince each projection is oriented at a slight angle away from the line orthogonal to the projection screen, a rectangular image will appear as an isosceles trapezium shape. This is called keystone distortion, and can be corrected as follows:\\\n\\'95\tPress the \\'91Menu\\'92 button. Select the \\'91Settings\\'92 menu and press \\'91Enter\\'92. \\\n\\'95\tSelect \\'91Keystone\\'92 and press \\'91Enter\\'92. Select \\'91H/V Keystone\\'92 and press \\'91Enter\\'92\\\n\\'95\tUse the up and down arrow buttons to adjust the vertical keystone until the image appears rectangular and the grid lines appear parallel.\\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132195-932909f0-931d-11e7-9416-d9386b871927.png\" alt=\"Keystone correction\" width=\"400\">\\\n\\\n------\\\n\\\n## 3.0. Luminance calibration\\\n\\\n### 3.1. Serial Connections\\\nLuminance measurement will be performed via the Stereo_Stim workstation, located in the server room. Stereo_Stim has a DB-9 serial port installed via a PCIe board, and should appear in Windows Device Manager as \\'91COM1\\'92. Stereo_Stim\\'92s serial port should be connected to the patch panel between the server room and magnet room using an RS-232 \\'91null modem with full handshaking\\'92 serial cable. This is different from a normal serial cable because it is wired in such a way that pins 2 & 3, 4 & 6, and 7 & 8 cross over between the two connectors. A second RS-232 serial cable with straight through wiring should be connected to the corresponding DB-9 connector in the magnet room, and the other end of that cable should be connected to the CA-210 serial output.\\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132199-974fbd12-931d-11e7-9413-634f0bd990d2.png\" alt=\"Konica CA-210 serial connections\">\\\n\\\n### 3.2. Photometer setup\\\nIn order to perform luminance measurements, the projectors and calibration screen should be set up as described above, with the bore covers closed and both mirrors in place. Turn both projectors on for stereo calibration, and insert the first INFITEC filter into the slot between the photometer probe and the calibration screen (see diagram), making a note of which projector/eye the filter corresponds to. \\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132203-9b4a50ee-931d-11e7-86e5-c2d62aa1deac.png\" alt=\"Konica CA-210 probe\">\\\n\\\nTo set up the Konica-Minolta CA-210 photometer:\\\n1.\tMake sure the power cable, probe cable and RS232 serial cable are plugged into the sockets on the back of the Konica-Minolta CA-210.\\\n2.\tSet the POWER switch to ON ( | ). 
The LCD display should read \\'91Darken probe, push 0-cal key\\'92.\\\n3.\tRotate the ring on the probe to \\'910-Cal\\'92 and then press the \\'910-Cal\\'92 button [1] on the front of the CA-210. The LCD display should now show a luminance measurement of 0 cd/m2. \\\n4.\tReturn the ring on the probe to \\'91Meas\\'92. The LCD display should now show a luminance measurement greater than 0 cd/m2.\\\n5.\tPress the \\'91Remote\\'92 button [5]. The Remote LED will light, indicating that the CA-210 is awaiting serial port communication from the PC.\\\n6.\tInsert the correct INFITEC filter for the projector that you are currently calibrating into the slot in front of the measurement probe. The bottom projector uses the type A filter, which is used for the right eye and appears slightly reddish when you look through it. The top projector uses the type B filter, which is used for the left eye and appears slightly greenish when you look through it.\\\n\\\n<img src=\"https://user-images.githubusercontent.com/7523776/30132208-9f516e34-931d-11e7-8c2e-d0d40eae0987.png\" alt=\"Konica CA-210 front panel\">\\\n\\\n### 3.3. Data acquisition in MATLAB\\\nSince MATLAB is used for stimulus presentation in the NIF \\'96 via either PsychophysicsToolbox (Brainard, 1997;Pelli, 1997) or MonkeyLogic (Asaad et al., 2013) \\'96 it is convenient to use for luminance testing. In order to read data from the CA210 via the serial port connection, the Data Acquisition Toolbox must be available, which in turn requires a 32-bit version of MATLAB.\\\n\\\nIn the control room:\\\n1.\tTurn off the lights in the magnet room. \\\n2.\tSelect the Stereo_Stim workstation and open MATLAB r2013b (or any version later than r2009). \\\n3.\tFrom the MATLAB command line, call the function \\'91LuminanceCal.m\\'92. A dialog box will appear asking you to enter the following information:\\\n\\\n### 3.4. Luminance analysis \\\n\\\n\\\n### 3.5. Applying CLUTs during your experiment\\\n\\\n\\\n-----\\\n\\\n## References\\\n\\\n* Asaad, W.F., Santhanam, N., Mcclellan, S., and Freedman, D.J. (2013). High-performance execution of psychophysical tasks with complex visual stimuli in MATLAB. Journal of neurophysiology 109, 249.\\\n* Ban, H., and Yamamoto, H. (2013). A non\\'96device-specific approach to display characterization based on linear, nonlinear, and hybrid search algorithms. Journal of vision 13, 20.\\\n* Brainard, D.H. (1997). The psychophysics toolbox. Spatial vision 10, 433-436.\\\n* Jorke, H., and Fritz, M. (2003). Infitec-a new stereoscopic visualisation tool by wavelength multiplex imaging. Proceedings of Electronic Displays 2003.\\\n* Pelli, D.G. (1997). The VideoToolbox software for visual psychophysics: Transforming numbers into movies. 
Spatial vision 10, 437-442.}" }, { "alpha_fraction": 0.5491730570793152, "alphanum_fraction": 0.554046094417572, "avg_line_length": 31.944862365722656, "blob_id": "810a8664fb17aed17729c91df529571ac7600788", "content_id": "43ea2d07e22288a9c037ac863787c4b3204e475c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13544, "license_type": "no_license", "max_line_length": 288, "num_lines": 399, "path": "/SCNI_Subfunctions/TDT_Matlab/SynapseAPI/Python/SynapseAPI.py", "repo_name": "MonkeyGone2Heaven/SCNI_Toolbar", "src_encoding": "UTF-8", "text": "import json\r\nimport time\r\nimport sys\r\nimport re\r\ntry:\r\n import httplib as http # python 2.x\r\nexcept ImportError:\r\n import http.client as http # python 3.x\r\n\r\nclass SynapseAPI:\r\n Modes = ('Idle', 'Standby', 'Preview', 'Record') #, 'Unknown'\r\n\r\n def __init__(self, server = \"localhost\", port = 24414):\r\n self.synCon = http.HTTPConnection(server, port)\r\n self.lastReqStr = ''\r\n self.reSueTank = re.compile('subject|user|experiment|tank|block')\r\n self.demoExperiments = ['demoAudioStim1','demoUser1','demoPCSort','demoBoxSort','demoTetSort','demoChanMap','demoSigSelector','demoSigInjector','demoElecStim','demoFileStim','demoParSeq']\r\n self.demoRequiredGizmos = {'demoAudioStim1':'aStim1','demoUser1':'TagTest1','demoPCSort':'Neu1','demoBoxSort':'Box1','demoTetSort':'Tet1','demoChanMap':'Map1','demoSigSelector':'Sel1','demoSigInjector':'Inj1','demoElecStim':'eStim1','demoFileStim':'fStim1','demoParSeq':'ParSeq1'}\r\n\r\n def __del__(self):\r\n self.synCon.close()\r\n\r\n def connect(self):\r\n self.synCon.close()\r\n try:\r\n self.synCon.connect()\r\n except Exception as e:\r\n raise Exception('failed to connect to Synapse\\n' + str(e))\r\n\r\n def exceptMsg(self):\r\n retval = ''\r\n\r\n if 'params' in self.lastReqStr:\r\n retval = '\\nSynapse may need to be in non-Idle mode'\r\n elif self.reSueTank.search(self.lastReqStr) is not None:\r\n retval = '\\nSynapse may need to be in Idle mode'\r\n\r\n return retval\r\n\r\n def getResp(self):\r\n try:\r\n resp = self.synCon.getresponse()\r\n\r\n # success\r\n if resp.status == 200:\r\n retval = json.loads(resp.read().decode('utf-8'))\r\n # previous request sent was invalid, why?\r\n else:\r\n raise Exception('%s%s' % (resp.reason, self.exceptMsg()))\r\n\r\n except:\r\n # some HTTP exceptions are such that subsequent communications may fail if we don't re-establish\r\n self.connect()\r\n raise Exception('failed to retrieve response from Synapse' + self.exceptMsg())\r\n\r\n return retval\r\n\r\n def sendRequest(self, reqTypeStr, reqStr, reqData = None):\r\n '''\r\n reqTypeStr = HTTP methods, e.g. 
'GET', 'PUT', 'OPTIONS'\r\n reqData = JSON formatted data\r\n '''\r\n\r\n try:\r\n if reqData is None:\r\n self.synCon.request(reqTypeStr, reqStr)\r\n else:\r\n self.synCon.request(reqTypeStr, reqStr, reqData, {'Content-type' : 'application/json'})\r\n\r\n self.lastReqStr = reqStr\r\n\r\n except:\r\n self.connect()\r\n raise Exception('failed to send %s %s to Synapse' % (reqTypeStr, reqStr))\r\n\r\n def sendGet(self, reqStr, respKey = None, reqData = None):\r\n self.sendRequest('GET', reqStr, reqData)\r\n resp = self.getResp()\r\n\r\n try:\r\n if respKey is None:\r\n retval = resp\r\n else:\r\n retval = resp[respKey]\r\n\r\n except:\r\n retval = None\r\n\r\n return retval\r\n\r\n def sendPut(self, reqStr, reqData):\r\n self.sendRequest('PUT', reqStr, reqData)\r\n # we must read and 'clear' response\r\n # otherwise subsequent HTTP request may fail\r\n self.getResp()\r\n\r\n def sendOptions(self, reqStr, respKey):\r\n self.sendRequest('OPTIONS', reqStr)\r\n\r\n try:\r\n retval = self.getResp()[respKey]\r\n except:\r\n retval = []\r\n\r\n return retval\r\n\r\n def parseJsonString(self, jsonData):\r\n try:\r\n retval = str(jsonData)\r\n except:\r\n retval = ''\r\n\r\n return retval\r\n\r\n def parseJsonStringList(self, jsonData):\r\n retval = []\r\n for value in jsonData:\r\n retval.append(self.parseJsonString(value))\r\n\r\n return retval\r\n\r\n def parseJsonFloat(self, jsonData, result = []):\r\n try:\r\n retval = float(jsonData)\r\n except:\r\n retval = 0.0\r\n # notify caller if interested\r\n if len(result) > 0:\r\n result[0] = False\r\n\r\n return retval\r\n\r\n def parseJsonFloatList(self, jsonData, result = []):\r\n retval = []\r\n for value in jsonData:\r\n retval.append(self.parseJsonFloat(value, result))\r\n\r\n return retval\r\n\r\n def parseJsonInt(self, jsonData):\r\n return int(self.parseJsonFloat(jsonData))\r\n\r\n def getMode(self):\r\n '''\r\n -1: Error\r\n 0: Idle\r\n 1: Standby\r\n 2: Preview\r\n 3: Record\r\n '''\r\n\r\n try:\r\n retval = self.Modes.index(self.sendGet('/system/mode', 'mode'))\r\n except:\r\n retval = -1\r\n\r\n return retval\r\n\r\n def getModeStr(self):\r\n '''\r\n '' (Error)\r\n 'Idle'\r\n 'Standby'\r\n 'Preview'\r\n 'Record'\r\n '''\r\n\r\n retval = self.getMode()\r\n if retval == -1:\r\n retval = ''\r\n else:\r\n retval = self.Modes[retval]\r\n\r\n return retval\r\n\r\n def setMode(self, mode):\r\n '''\r\n mode must be an integer between 0 and 3, inclusive\r\n '''\r\n\r\n if mode in range(len(self.Modes)):\r\n self.sendPut('/system/mode', json.dumps({'mode' : self.Modes[mode]}))\r\n else:\r\n raise Exception('invalid call to setMode()')\r\n\r\n def setModeStr(self, modeStr):\r\n '''\r\n string equivalent of setMode()\r\n '''\r\n\r\n try:\r\n mode = self.Modes.index(modeStr)\r\n except:\r\n raise Exception('invalid call to setModeStr()')\r\n\r\n self.setMode(mode)\r\n\r\n def issueTrigger(self, id):\r\n self.sendPut('/trigger/' + str(id), None)\r\n\r\n def getSystemStatus(self):\r\n retval = {'sysLoad' : 0, 'uiLoad' : 0, 'errorCount' : 0, 'rateMBps' : 0, 'recordSecs' : 0}\r\n resp = self.sendGet('/system/status')\r\n\r\n sysStat = {'sysLoad' : '', 'uiLoad' : '', 'errors' : '', 'dataRate' : '', 'recDur' : ''}\r\n for key in resp:\r\n try:\r\n sysStat[key] = resp[key]\r\n except:\r\n continue\r\n\r\n # Synapse internal keys : user friendly keys\r\n keyMap = {'sysLoad' : 'sysLoad', 'uiLoad' : 'uiLoad', 'errors' : 'errorCount', 'dataRate' : 'rateMBps', 'recDur' : 'recordSecs'}\r\n for key in sysStat:\r\n try:\r\n if key == 'dataRate':\r\n # '0.00 MB/s'\r\n 
retval[keyMap[key]] = float(sysStat[key].split()[0])\r\n elif key == 'recDur':\r\n # 'HH:MM:SSs'\r\n recDur = sysStat[key][:-1].split(':')\r\n retval[keyMap[key]] = int(recDur[0]) * 3600 + int(recDur[1]) * 60 + int(recDur[2])\r\n else:\r\n retval[keyMap[key]] = int(sysStat[key])\r\n\r\n except:\r\n continue\r\n\r\n return retval\r\n\r\n def getPersistModes(self):\r\n return self.parseJsonStringList(self.sendOptions('/system/persist', 'modes'))\r\n\r\n def getPersistMode(self):\r\n return self.parseJsonString(self.sendGet('/system/persist', 'mode'))\r\n\r\n def setPersistMode(self, modeStr):\r\n self.sendPut('/system/persist', json.dumps({'mode' : modeStr}))\r\n\r\n def getSamplingRates(self):\r\n retval = {}\r\n resp = self.sendGet('/processor/samprate')\r\n\r\n for proc in list(resp.keys()):\r\n retval[self.parseJsonString(proc)] = self.parseJsonFloat(resp[proc])\r\n\r\n return retval\r\n\r\n def getKnownSubjects(self):\r\n return self.parseJsonStringList(self.sendOptions('/subject/name', 'subjects'))\r\n\r\n def getKnownUsers(self):\r\n return self.parseJsonStringList(self.sendOptions('/user/name', 'users'))\r\n\r\n def getKnownExperiments(self):\r\n return self.parseJsonStringList(self.sendOptions('/experiment/name', 'experiments'))\r\n\r\n def getKnownTanks(self):\r\n return self.parseJsonStringList(self.sendOptions('/tank/name', 'tanks'))\r\n\r\n def getKnownBlocks(self):\r\n return self.parseJsonStringList(self.sendOptions('/block/name', 'blocks'))\r\n\r\n def getCurrentSubject(self):\r\n return self.parseJsonString(self.sendGet('/subject/name', 'subject'))\r\n\r\n def getCurrentUser(self):\r\n return self.parseJsonString(self.sendGet('/user/name', 'user'))\r\n\r\n def getCurrentExperiment(self):\r\n return self.parseJsonString(self.sendGet('/experiment/name', 'experiment'))\r\n\r\n def getCurrentTank(self):\r\n return self.parseJsonString(self.sendGet('/tank/name', 'tank'))\r\n\r\n def getCurrentBlock(self):\r\n return self.parseJsonString(self.sendGet('/block/name', 'block'))\r\n\r\n def setCurrentSubject(self, name):\r\n self.sendPut('/subject/name', json.dumps({'subject' : name}))\r\n\r\n def setCurrentUser(self, name, pwd = ''):\r\n self.sendPut('/user/name', json.dumps({'user' : name, 'pwd' : pwd}))\r\n\r\n def setCurrentExperiment(self, name):\r\n self.sendPut('/experiment/name', json.dumps({'experiment' : name}))\r\n\r\n def setCurrentTank(self, name):\r\n self.sendPut('/tank/name', json.dumps({'tank' : name}))\r\n\r\n def setCurrentBlock(self, name):\r\n self.sendPut('/block/name', json.dumps({'block' : name}))\r\n\r\n def createTank(self, path):\r\n self.sendPut('/tank/path', json.dumps({'tank' : path}))\r\n\r\n def createSubject(self, name, desc = '', icon = 'mouse'):\r\n self.sendPut('/subject/name/new', json.dumps({'subject' : name, 'desc' : desc, 'icon' : icon}))\r\n\r\n def getGizmoNames(self):\r\n return self.parseJsonStringList(self.sendOptions('/gizmos', 'gizmos'))\r\n\r\n def getParameterNames(self, gizmoName):\r\n return self.parseJsonStringList(self.sendOptions('/params/' + gizmoName, 'parameters'))\r\n\r\n def getParameterInfo(self, gizmoName, paramName):\r\n info = self.parseJsonStringList(self.sendGet('/params/info/%s.%s' % (gizmoName, paramName), 'info'))\r\n keys = ('Name', 'Unit', 'Min', 'Max', 'Access', 'Type', 'Array')\r\n\r\n retval = {}\r\n for i in range(len(keys)):\r\n key = keys[i]\r\n\r\n try:\r\n retval[key] = info[i]\r\n\r\n if key == 'Array' and info[i] != 'No' and info[i] != 'Yes':\r\n retval[key] = int(info[i])\r\n elif key == 'Min' or key == 
'Max':\r\n retval[key] = float(info[i])\r\n\r\n except:\r\n retval[key] = None\r\n\r\n return retval\r\n\r\n def getParameterSize(self, gizmoName, paramName):\r\n return self.parseJsonInt(self.sendGet('/params/size/%s.%s' % (gizmoName, paramName), 'value'))\r\n\r\n def getParameterValue(self, gizmoName, paramName):\r\n value = self.sendGet('/params/%s.%s' % (gizmoName, paramName), 'value')\r\n\r\n didConvert = [True]\r\n retval = self.parseJsonFloat(value, didConvert)\r\n \r\n if not didConvert[0]:\r\n retval = self.parseJsonString(value)\r\n\r\n return retval\r\n\r\n def getParameterValues(self, gizmoName, paramName, count = -1, offset = 0):\r\n '''\r\n if count == -1:\r\n count = getParameterSize(gizmoName, paramName)\r\n '''\r\n\r\n if count == -1:\r\n try:\r\n count = self.getParameterSize(gizmoName, paramName)\r\n except:\r\n count = 1\r\n\r\n values = self.sendGet('/params/%s.%s' % (gizmoName, paramName),\r\n 'values',\r\n json.dumps({'count' : count, 'offset' : offset}))\r\n\r\n # HACK to pass variable by reference\r\n didConvert = [True]\r\n retval = self.parseJsonFloatList(values, didConvert)\r\n \r\n if not didConvert[0]:\r\n retval = self.parseJsonStringList(values)\r\n \r\n return retval[:min(count, len(retval))]\r\n\r\n def setParameterValue(self, gizmoName, paramName, value):\r\n self.sendPut('/params/%s.%s' % (gizmoName, paramName), json.dumps({'value' : value}))\r\n\r\n def setParameterValues(self, gizmoName, paramName, values, offset = 0):\r\n self.sendPut('/params/%s.%s' % (gizmoName, paramName), json.dumps({'offset' : offset, 'values' : values}))\r\n\r\n def appendExperimentMemo(self, experiment, memo):\r\n self.sendPut('/experiment/notes', json.dumps({'experiment' : experiment, 'memo' : memo}))\r\n\r\n def appendSubjectMemo(self, subject, memo):\r\n self.sendPut('/subject/notes', json.dumps({'subject' : subject, 'memo' : memo}))\r\n\r\n def appendUserMemo(self, user, memo):\r\n self.sendPut('/user/notes', json.dumps({'user' : user, 'memo' : memo}))\r\n\r\n def startDemo(self, name):\r\n if name not in self.demoExperiments:\r\n raise Exception('%s is not a valid demo experiment' % name)\r\n if self.getCurrentExperiment() != name:\r\n if name not in self.getKnownExperiments():\r\n raise Exception('Experiment %s not found' % name)\r\n if self.getModeStr() != 'Idle':\r\n self.setModeStr('Idle')\r\n try:\r\n self.setCurrentExperiment(name)\r\n except:\r\n raise Exception('Experiment %s not selected' % name)\r\n\r\n if self.demoRequiredGizmos[name] not in self.getGizmoNames():\r\n raise Exception('Required gizmo %s not found' % self.demoRequiredGizmos[name])\r\n\r\n if self.getModeStr() == 'Idle':\r\n self.setPersistMode('Fresh')\r\n self.setModeStr('Record')\r\n" } ]
5
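The hardware-control client in the record that closes above parses a recording-duration string of the form 'HH:MM:SSs' by stripping the trailing 's' and splitting on ':' before converting to seconds. A minimal standalone sketch of that conversion, assuming the same input format (the function name parse_rec_duration is illustrative and not part of the original class):

    def parse_rec_duration(rec_dur):
        # Input looks like '01:02:03s'; drop the trailing 's' marker before splitting.
        hours, minutes, seconds = rec_dur[:-1].split(':')
        return int(hours) * 3600 + int(minutes) * 60 + int(seconds)

    assert parse_rec_duration('01:02:03s') == 3723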
latashyoussef/USYD2015
https://github.com/latashyoussef/USYD2015
76e4fe9b1f4c41cc0f0ebd824654e327ee0a3a69
0627d86e52c535070e685a009d6f6ea203240247
194c919026024512709efb6c22261ab908565952
refs/heads/master
2020-12-25T19:15:01.751447
2015-01-22T13:59:06
2015-01-22T13:59:06
29,680,410
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7454545497894287, "alphanum_fraction": 0.8181818127632141, "avg_line_length": 26.5, "blob_id": "2c495c1baed16e34d2b7ae4101ae2d67da79f80c", "content_id": "2662f6625ac1e3a4991f2566ed27e79f5cc71acc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 55, "license_type": "no_license", "max_line_length": 43, "num_lines": 2, "path": "/README.md", "repo_name": "latashyoussef/USYD2015", "src_encoding": "UTF-8", "text": "# USYD2015\nScripts used to aid simulations of nanorods\n" }, { "alpha_fraction": 0.5073529481887817, "alphanum_fraction": 0.5514705777168274, "avg_line_length": 17, "blob_id": "26154a0a4ea9729d1e4bb1c746cc958f327c94d5", "content_id": "7688ec530f0195d44cf331f4fd8b3fac1ff19557", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 272, "license_type": "no_license", "max_line_length": 33, "num_lines": 15, "path": "/make_crystal.py", "repo_name": "latashyoussef/USYD2015", "src_encoding": "UTF-8", "text": "# Aspect Ratio\n\nD = 1.0\t\t# Diameter of the sphere\nL = 3.2\t\t# Length of the cylinder\n\ndef make_row_x(box_length, L, D):\n\trod_count = 0\n\tx = 0.5*D\n\twhile x < box_length:\n\t\ty = L/2.0\n\t\tz = D/2.0\n\t\tprint \"%f %f %f \\n\" % (x, y, z)\n\t\trod_count += 1\n\t\tx += D\n\treturn rod_count\n\n\n" } ]
2
omarka/computervision
https://github.com/omarka/computervision
9d3bf2c89e0886f578508695c7b1cd2c30679802
221481c22fc9ce3c1578caacb2dd1b67afb8c567
0ef00365e4b0fddb1253a9c63162cd70b6434482
refs/heads/master
2020-03-22T10:07:58.165369
2018-07-05T18:09:49
2018-07-05T18:09:49
139,882,830
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6150788068771362, "alphanum_fraction": 0.6564570665359497, "avg_line_length": 25.216217041015625, "blob_id": "77373f812a9f94e8666fbf979d7d94b45fb4aa0f", "content_id": "fe9079fa26df450394daf11fb306ff2cbd542ef4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6791, "license_type": "no_license", "max_line_length": 204, "num_lines": 259, "path": "/bw_colorization/bw_colorization.py", "repo_name": "omarka/computervision", "src_encoding": "UTF-8", "text": "#YUV Space\n#Takes Y - monochrome luminance channel\n#Ouptus U and V, chorminance channels, encoding the color\n\nimport cv2\nimport numpy as np\n\nimport cvxopt\n\nfrom cvxopt.modeling import variable\n\nfrom scipy.sparse import csc_matrix\nfrom scipy.sparse import csr_matrix\nfrom scipy.sparse.linalg import spsolve\nfrom scipy.sparse import lil_matrix\n\nimport os\nimport errno\n\nfrom os import path\n\n\ndef rgb2yiq(rgb):\n rgb = rgb / 255.0\n y = np.clip(np.dot(rgb, np.array([0.299, 0.587, 0.144])), 0, 1)\n i = np.clip(np.dot(rgb, np.array([0.595716, -0.274453, -0.321263])), -0.5957, 0.5957)\n q = np.clip(np.dot(rgb, np.array([0.211456, -0.522591, 0.311135])), -0.5226, 0.5226)\n yiq = rgb\n yiq[..., 0] = y\n yiq[..., 1] = i\n yiq[..., 2] = q\n return yiq\n\n\ndef yiq2rgb(yiq):\n r = np.dot(yiq, np.array([1.0, 0.956295719758948, 0.621024416465261]))\n g = np.dot(yiq, np.array([1.0, -0.272122099318510, -0.647380596825695]))\n b = np.dot(yiq, np.array([1.0, -1.106989016736491, 1.704614998364648]))\n rgb = yiq\n rgb[:, :, 0] = r\n rgb[:, :, 1] = g\n rgb[:, :, 2] = b\n return np.clip(rgb, 0.0, 1.0) * 255.0\n\nIMG_EXTENSIONS = [\"png\", \"jpeg\", \"jpg\", \"gif\", \"tiff\", \"tif\", \"raw\", \"bmp\"]\nSRC_FOLDER = \"images/source\"\nOUT_FOLDER = \"images/output\"\n\nfilename_input = \"giraffe.bmp\"\nfilename_constraints = \"giraffe_marked.bmp\"\n\ninput_image = cv2.imread(filename_input, cv2.IMREAD_GRAYSCALE)\n\noutput_bgr = cv2.imread(filename_input)\noutput_rgb = cv2.cvtColor(output_bgr, cv2.COLOR_BGR2RGB)\noutput_yuv = rgb2yiq(output_rgb)\n\ninput_image = (input_image.astype(np.float))/255.\n\nconstraint_bgr = cv2.imread(filename_constraints)#, cv2.IMREAD_GRAYSCALE)\nconstraint_rgb = cv2.cvtColor(constraint_bgr, cv2.COLOR_BGR2RGB)\nconstraint_yuv = rgb2yiq(constraint_rgb)#cv2.cvtColor(constraint_bgr, cv2.COLOR_BGR2YUV)\n\n#np.set_printoptions(threshold=np.nan)\n\n#print(\"yuv\")\n#print(constraint_yuv[:,:,1])\n\n#print(\"bgr\")\n#print(constraint_bgr[:,:,1])\n\n#exit()\n\nheight, width = input_image.shape\n\nnum_pixels = width*height\n\nprint(num_pixels)\n\n\n\n#Construct sparse diagonal matrix\nD = csc_matrix((np.ones(num_pixels),(range(num_pixels),range(num_pixels))),shape=(num_pixels,num_pixels))#np.zeros((num_pixels, num_pixels),dtype = np.float)#np.diag(np.ones(num_pixels))#,dtype=np.float))\n#W = np.zeros((num_pixels, num_pixels),dtype = np.float)\n\nW_val=[]\nW_i = []\nW_j = []\n\nmin_sigma = 0.0001\n\nwindow = 5\n\n\n#Construct sparse weight matrix\nfor i_pixel in range(num_pixels):\n\t\n\t#D[i,i] = 1.0\n\t\n\th = i_pixel/width\n\tw = i_pixel%width\n\t\n\tcount_row = 0\n\tsum_row = 0.\n\t\n\twindow_pixels = input_image[max(0,h-window):min(h+window,height-1),max(0,w-window):min(w+window,width-1)]\n\t#window_pixel_locations = range( \n\t\n\tsigma = max(np.std(window_pixels), min_sigma)\n\t#print(sigma)\n\t#if (sigma == 0.0):\n\t#\tprint(sigma, min_sigma, np.std(window_pixels))\n\t\n\tfor j in range(-window, window):\n\t\t#for k in range(-window, 
window):\n\t\tfor k in range(-window, window):\n\t\t\tif (h+j >= 0 and w+k >= 0 and h+j < height and w+k < width):# and (k != 0 and j != 0)):\n\t\t\t\t#print(h,h+j,height,w,w+k,width)\n\t\t\t\tj_pixel = (h+j)*width + (w+k)\n\t\t\t\tW_val.append(np.exp(-np.square(input_image[h, w] - input_image[h+j, w+k])/(2*sigma**2)))\n\t\t\t\tW_i.append(i_pixel)\n\t\t\t\tW_j.append(j_pixel)\n\t\t\t\tcount_row+=1\n\t\t\t\tsum_row+=np.exp(-np.square(input_image[h, w] - input_image[h+j, w+k])/(2*sigma**2))\n\tfor k in range(count_row):\n\t\tW_val[len(W_val)-1-k] /= sum_row\n\t#print(i_pixel, num_pixels)\n\t\t\nW = csc_matrix((W_val, (W_i, W_j)),shape=(num_pixels,num_pixels))\n\nprint(\"Created weight matrix\")\n\n#M = 0.5*((D-W)+(D-W).T)\n#Calculate Laplacian matrix\nL = D-W\n\nprint(\"Calculated Laplacian\")\n\n#for i in range(num_pixels):\n#\tsum_row = sum(W[i,:])\n#\tW[i,:] = W[i,:]/sum_row\n\t\n#print(sum(W[5,:]))\n#A_u = np.zeros((num_pixels, num_pixels),dtype = np.float)\n#bu = np.zeros((num_pixels),dtype = np.float)\n#Au_val = []\n#Au_i = []\n#Au_j = []\n\n#bu_val = []\n#bu_i = []\n#bu_j = []\n\n#A_v = np.zeros((num_pixels, num_pixels),dtype = np.float)\n#bv = np.zeros((num_pixels),dtype = np.float)\n#Av_val = []\n#Av_i = []\n#Av_j = []\n\n#bv_val = []\n#bv_i = []\n#bv_j = []\n\n#print(constraint_yuv[:,:,1])\n\n#Convert to lil_matrix for modification of structure\nLu = lil_matrix(L)\nLv = Lu.copy()\n\n#Modify Laplacian matrix to account for fixed colored pixels (i.e. boundary conditions)\n\nprint(\"Converted Laplacian matrix to lil sparse type\")\n\nsmall = 0.001\n\n#rows_u_constraints, cols_u_constraints = np.where(constraint_yuv[:,:,1]>small)\n#rows_v_constraints, cols_v_constraints = np.where(constraint_yuv[:,:,2]>small)\nrows_u_constraints, cols_u_constraints = np.nonzero(constraint_yuv[:,:,1]-output_yuv[:,:,1])\nrows_v_constraints, cols_v_constraints = np.nonzero(constraint_yuv[:,:,2]-output_yuv[:,:,2])\n#rows_u_constraints, cols_u_constraints = np.where(np.abs(constraint_yuv[:,:,1]-colored_image_yuv[:,:,1])>small)\n#rows_v_constraints, cols_v_constraints = np.where(np.abs(constraint_yuv[:,:,2]-colored_image_yuv[:,:,2])>small)\n\n\nbu = np.zeros((num_pixels), dtype=np.float)\nbv = np.zeros((num_pixels), dtype=np.float)\n\nprint(len(rows_u_constraints), ' u constraints')\n\n\n\nfor index in range(len(rows_u_constraints)):\n\ti = rows_u_constraints[index]\n\tj = cols_u_constraints[index]\n\tpixel = i*width+j\n\t#print(pixel, num_pixels)\n\trow_vector = csr_matrix(([1.],([0],[pixel])),shape=(1,num_pixels))\n\tLu[pixel,:] = row_vector\n\tbu[pixel] = constraint_yuv[i,j,1]\n\nprint(len(rows_v_constraints), ' v constraints')\n\n\nfor index in range(len(rows_v_constraints)):\n\ti = rows_v_constraints[index]\n\tj = cols_v_constraints[index]\n\tpixel = i*width+j\n\trow_vector = csr_matrix(([1.],([0],[pixel])),shape=(1,num_pixels))\n\tLv[pixel,:] = row_vector\n\tbv[pixel] = constraint_yuv[i,j,2]\t\n\n\n#Converting back to csc for algebra\nLu_csc = csc_matrix(Lu)\nLv_csc = csc_matrix(Lv)\n\n\nprint(\"Modified Laplacian with constraints\")\n\nu_solution = spsolve(Lu_csc, bu)\n\nprint(\"Solved for U\")\n\nv_solution = spsolve(Lv_csc, bv)\n\nprint(\"Solved for V\")\n\nu_solution_reshaped = np.reshape(u_solution, (height, width))\nv_solution_reshaped = np.reshape(v_solution, (height, width))\n\n#colored_image = rgb2yiq(input_image)\noutput_yuv[:,:,1] = (u_solution_reshaped)\noutput_yuv[:,:,2] = (v_solution_reshaped)\n\noutput_rgb = np.uint8(yiq2rgb(output_yuv))\n\noutput_bgr = cv2.cvtColor(output_rgb, 
cv2.COLOR_RGB2BGR)\ncv2.imwrite(\"output.jpg\", output_bgr)\n\n\n\t\t\n#u = np.zeros((num_pixels),dtype = np.floatco)\n#v = np.zeros((num_pixels),dtype = np.float)\n\n#Au = cvxopt.spmatrix(Au_val, Au_i, Au_j,size=(num_pixels,num_pixels))\n#bu = cvxopt.matrix(bu)#spmatrix(bu_val, bu_i, bu_j)\n\n\n\n#Av = cvxopt.spmatrix(Av_val, Av_i, Av_j,size=(num_pixels,num_pixels))\n#bv = cvxopt.matrix(bv)#spmatrix(bv_val, bv_i, bv_j)\n\n\n\n\n\n\n#q = cvxopt.matrix(np.zeros((num_pixels),dtype = np.float))\n\n#sol_u = cvxopt.solvers.qp(P=M,q=q)#,A=Au,b=bu)\n\n" }, { "alpha_fraction": 0.795121967792511, "alphanum_fraction": 0.8065040707588196, "avg_line_length": 49.25, "blob_id": "a4147c024bfc8cc93ae2bc4c0d07753b3a71f55e", "content_id": "59e71da643e18ae601f70037790124385ea12e23", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 615, "license_type": "no_license", "max_line_length": 213, "num_lines": 12, "path": "/bw_colorization/README.txt", "repo_name": "omarka/computervision", "src_encoding": "UTF-8", "text": "The file requires Python and the following libraries: OpenCV, cvxopt, scipy.\r\n\r\nLine 45 should contain the grayscale image.\r\nLine 46 should contain the mockup image.\r\n\r\nIn this project, we consider algorithms to add coloring to monochrome images. The implemented algorithms rely on the assumption that neighboring pixels with similar monochrome intensities will have similar colors.\r\n\r\nAlgorithm implemented:\r\nhttp://webee.technion.ac.il/people/anat.levin/papers/colorization-siggraph04.pdf\r\n\r\nRGB<->YIV conversion taken from:\r\nhttps://github.com/asafdav2/colorization_using_optimization/blob/master/color_conv.py\r\n" } ]
2
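The bw_colorization record above implements Levin-style scribble colorization: its README states the core assumption that neighbouring pixels with similar monochrome intensities should receive similar colours, and the script encodes this as a Gaussian affinity on luminance differences computed per local window. A minimal sketch of that weight, assuming luminance values already scaled to [0, 1] (the function name and sample values here are illustrative only):

    import numpy as np

    def affinity(y_i, y_j, window, min_sigma=0.0001):
        # sigma is the luminance spread inside the pixel's local window,
        # floored so a perfectly flat window does not divide by zero.
        sigma = max(np.std(window), min_sigma)
        return np.exp(-((y_i - y_j) ** 2) / (2.0 * sigma ** 2))

    window = np.array([0.20, 0.22, 0.21, 0.80])
    print(affinity(0.20, 0.22, window))  # similar intensities -> weight close to 1
    print(affinity(0.20, 0.80, window))  # dissimilar intensities -> weight much closer to 0

In the original script these weights are row-normalised, assembled into a sparse Laplacian D - W, and the rows corresponding to scribbled pixels are replaced by identity rows so that spsolve propagates the U and V chrominance channels outward from the marked pixels.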
10clouds/citypulse
https://github.com/10clouds/citypulse
5780f6373134431329e92b3e8f7b5e582075b4ce
5cd4f2e7692ec2dc489934ee24b295c97b1d9ea1
e1d46e1899e577860d3017edb889f5e1931cbfbb
refs/heads/master
2019-07-30T17:25:59.360603
2012-04-15T09:34:43
2012-04-15T09:34:43
4,020,045
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5088853240013123, "alphanum_fraction": 0.5218093991279602, "avg_line_length": 23.27450942993164, "blob_id": "e29c215b5b857969f776fac1b6040f195b8d851a", "content_id": "0e84903993e6979d18e483bab76c05da15c08b8e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1240, "license_type": "no_license", "max_line_length": 74, "num_lines": 51, "path": "/citypulse/tweetcrawler/consumer.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport json\nimport datetime\n\nfrom django.conf import settings\nfrom dateutil.parser import parse\n\nfrom citypulse.main.models import Pulse\n\nimport requests as req\nfrom requests.exceptions import Timeout\n\n\nurl = getattr(settings, \"TWEET_CRAWLER_URL\", \"http://192.168.3.168:8080/\")\ntimeout = getattr(settings, \"TWEET_CRAWLER_TIMEOUT\", 1)\n\n\ndef fetch_tweets():\n while True:\n try:\n repl = req.get(url, timeout=timeout).content\n tweet = json.loads(repl)\n\n try:\n loc = tweet[\"coordinates\"][\"coordinates\"]\n\t\tloc.reverse()\n p = Pulse(\n typename = \"twitter\",\n title = tweet[\"user\"][\"name\"],\n data = tweet,\n # stupid twitter returns reversed geoloc\n location = loc,\n timestamp_added = datetime.datetime.now(),\n timestamp_created = parse(tweet[\"created_at\"])\n )\n\n p.save()\n print loc\n except:\n pass\n\n except Timeout:\n print \"Się utło..\"\n break\n\n\nif __name__ == \"__main__\":\n fetch_tweets()\n\ndef main():\n\tfetch_tweets()\n" }, { "alpha_fraction": 0.6357526779174805, "alphanum_fraction": 0.6357526779174805, "avg_line_length": 28.760000228881836, "blob_id": "388eb53dec873e3ef61a7367a97833b7b2787658", "content_id": "e46e4d13a3f09cb7de98d0ecbf43ecb67cd673a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 744, "license_type": "no_license", "max_line_length": 66, "num_lines": 25, "path": "/scripts/load_warsaw_venues.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "# How to use:\n# python manage.py executescript -s scripts/load_warsaw_venues.py\nimport os\nimport json\nfrom citypulse.fsq.models import FSVenue\nfrom citypulse.settings.mongoconnection import connect\n\n\ndef main():\n this_dir = os.path.dirname(os.path.abspath(__file__))\n fixtures_path = os.path.join(this_dir, \"warsaw_venues.json\")\n\n # Connect to MongoDB\n connect()\n\n with open(fixtures_path) as f:\n fixtures = json.load(f)\n for venue in fixtures:\n FSVenue.objects.get_or_create(vid=venue[\"vid\"], defaults={\n \"name\": venue[\"name\"],\n \"location\": venue[\"location\"],\n \"people\": venue[\"people\"],\n \"full_data\": venue[\"full_data\"],\n })\n print FSVenue.objects.count()\n" }, { "alpha_fraction": 0.5332416296005249, "alphanum_fraction": 0.5520403385162354, "avg_line_length": 21.484535217285156, "blob_id": "4b2bfab207c53ccd1db7d421ebca309ca2d32178", "content_id": "fbc5ac11be424491105e4fe943f211afea1bf933", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2181, "license_type": "no_license", "max_line_length": 72, "num_lines": 97, "path": "/citypulse/tweetcrawler/crawler.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "# -*- coding: utf8 -*-\nimport json\n\nimport tweepy\n\nimport datetime\n\nfrom dateutil.parser import parse\n\nfrom citypulse.main.models import Pulse\n\nclass Conf(object):\n def __init__(self):\n self.__dict__.update(dict(\n consumer_key = 
\"YwHse5fsGmfRZRdSk4vpIw\",\n consumer_secret = \"WUenzyuJiZa9y8z6BGDlCkfVV97mJUNnvtgeCNwgn9M\",\n token = \"16293459-zazmLP0LoMHDgB1GlExgV3vYMqQ0Tva14DotxmBzw\",\n secret = \"xMi1vJhGBBAYpISQdtSg1JjYePcs3b7lL8GmkPT5csA\",\n addr = \"https://stream.twitter.com/1/statuses/filter.json\"\n ))\n\nclass listener(object):\n def __init__(self, q=None):\n self.q = q\n\n def on_data(self, data):\n if len(data) < 2:\n return True\n\n print len(data), type(data)\n try:\n int(data)\n except:\n pass\n else:\n return True\n\n try:\n tweet = json.loads(data)\n loc = tweet[\"coordinates\"][\"coordinates\"]\n p = Pulse(\n typename = \"twitter\",\n title = tweet[\"user\"][\"name\"],\n data = tweet,\n location = list(reversed(loc)),\n timestamp_added = datetime.datetime.now(),\n timestamp_created = parse(tweet[\"created_at\"])\n )\n\n p.save()\n print data\n except Exception as ex:\n print ex\n\n return True\n\n def on_error(self, e):\n print e\n return True\n\n def on_timeout(self):\n return True\n\ncreds = Conf()\n\nauth = tweepy.OAuthHandler(creds.consumer_key, creds.consumer_secret)\nauth.set_access_token(creds.token, creds.secret)\n\n\n\n#OAuthHook.consumer_key = tw_conf.consumer_key\n#OAuthHook.consumer_secret = tw_conf.consumer_secret\n#\n#oauth_hook = OAuthHook(tw_conf.token, tw_conf.secret, header_auth=True)\n\n#client = requests.session(hooks={'pre_request': oauth_hook})\n\ndata = {\n \"locations\": (20.772,52.105,21.261,52.36)\n #\"locations\": (0.772,2.105,21.261,52.36)\n}\n\n#r = client.post(tw_conf.addr, data=data)\n\n\n\nstream = tweepy.Stream(auth=auth, listener=listener())\nstream.filter(**data)\n\n#for i in r.iter_lines():\n# try:\n# tweet = json.loads(i)\n# q.put(tweet)\n# pp.pprint(tweet)\n# except Exception as e:\n# print i, e\n#\n" }, { "alpha_fraction": 0.7062937021255493, "alphanum_fraction": 0.7062937021255493, "avg_line_length": 27.600000381469727, "blob_id": "479f91646ce58ec4bcc9d03e1810627eb8c6e273", "content_id": "7d83430f965411ec1f45ea252e14a9fae710aa51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 286, "license_type": "no_license", "max_line_length": 55, "num_lines": 10, "path": "/citypulse/fbcrawler/management/commands/fbcrawl.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from django.core.management.base import BaseCommand\nfrom citypulse.fbcrawler.crawler import FacebookCrawler\n\nclass Command(BaseCommand):\n args = 'facebook crawl'\n help = 'crawl it'\n\n def handle(self, *args, **options):\n crawler = FacebookCrawler()\n crawler.run()\n" }, { "alpha_fraction": 0.7708779573440552, "alphanum_fraction": 0.7708779573440552, "avg_line_length": 32.35714340209961, "blob_id": "1adfe620decfa23f1a23ede442849080dbf89e75", "content_id": "4ae5fd686b000dbaabb50fda201f419d35d82d80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 467, "license_type": "no_license", "max_line_length": 58, "num_lines": 14, "path": "/citypulse/api/urls.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from django.conf.urls.defaults import *\nfrom citypulse.api.resources import EventAggregateResource\nfrom citypulse.api.pulseresource import PulseResource\nfrom citypulse.fsq.resources import FSVenueResource\n\nevent_resource = EventAggregateResource()\npulse_resouce = PulseResource()\nvenue_resource = FSVenueResource()\n\nurlpatterns = patterns('',\n (r'', include(pulse_resouce.urls)),\n (r'', include(event_resource.urls)),\n (r'', 
include(venue_resource.urls)),\n)\n" }, { "alpha_fraction": 0.5571997761726379, "alphanum_fraction": 0.5674445033073425, "avg_line_length": 26.88888931274414, "blob_id": "6b5118f4925041ae01ae403368ed7b81e9032566", "content_id": "360977f39594445cd4ce043474dc79912cf3a941", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1757, "license_type": "no_license", "max_line_length": 73, "num_lines": 63, "path": "/citypulse/fsq/core.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "import random\nimport foursquare\nfrom utils import calculate_squares\nfrom citypulse.settings.mongoconnection import connect\nfrom models import FSVenue\n\n\nACCESS_TOKENS = [\n \"2NKXN04BNBDW2TNBMVVOEVZB0PK14L12T2FJESUKGSAKCKJU\",\n \"ZWISNLLL2AYBLEHBWYRX5G0NAXYUD3DSSU4UYLGE4ENPUHJ1\",\n]\n# Connect to Foursquare API\nclient = foursquare.Foursquare(access_token=random.choice(ACCESS_TOKENS))\n# Connect to MongoDB.\nconnect()\n\n\ndef venues_for_point(point, radius=1000):\n lat_long = \"%s,%s\" % (point[0], point[1])\n response = client.venues.explore(params={\n \"ll\": lat_long,\n \"radius\": radius,\n \"limit\": 50\n })\n recommended = response[\"groups\"][0]\n items = recommended[\"items\"]\n\n venues = []\n\n for item in items:\n venue = item[\"venue\"]\n venues.append({\n \"vid\": venue[\"id\"],\n \"name\": venue[\"name\"],\n \"location\": [\n venue[\"location\"][\"lat\"],\n venue[\"location\"][\"lng\"],\n ],\n \"people\": venue[\"hereNow\"][\"count\"],\n \"full_data\": venue\n })\n return venues\n\n\ndef venues_for_city(bounds, divisions=20, radius=1000):\n u\"\"\"Warning! This function is really heavy!\n It crawls ALL venues for a given city bounds.\n \"\"\"\n squares = calculate_squares(bounds, divisions)\n # Store venue IDs so we won't get duplicates.\n visited = set()\n # For every square in a city...\n for i, point in enumerate(squares):\n print \"%g %%\" % (100. * i / len(squares))\n # Fetch venues.\n venues_list = venues_for_point(point, radius=radius)\n # Save venue to Mongo if necessary.\n for venue in venues_list:\n vid = venue[\"vid\"]\n if vid not in visited:\n visited.add(vid)\n fsv = FSVenue(**venue)\n fsv.save()\n" }, { "alpha_fraction": 0.508348822593689, "alphanum_fraction": 0.5380333662033081, "avg_line_length": 22.434782028198242, "blob_id": "bf9631a5026f354052d8e7bde05dbe90b170be1c", "content_id": "7fd519a59945bb3f6832733774eb6ddf1ed21446", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1078, "license_type": "no_license", "max_line_length": 54, "num_lines": 46, "path": "/citypulse/fsq/utils.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "WARSAW_BOUNDS = {\n 'ne': {\n 'lat': 52.324,\n 'lng': 21.116,\n },\n 'sw': {\n 'lat': 52.113,\n 'lng': 20.904,\n },\n}\n\n\ndef square_dimensions(bounds):\n \"\"\"width x height\"\"\"\n width = bounds[\"ne\"][\"lng\"] - bounds[\"sw\"][\"lng\"]\n height = bounds[\"ne\"][\"lat\"] - bounds[\"sw\"][\"lat\"]\n return width, height\n\n\ndef get_nw_point(bounds):\n return (\n bounds[\"ne\"][\"lat\"],\n bounds[\"sw\"][\"lng\"],\n )\n\n\ndef calculate_squares(bounds, divisions=10):\n u\"\"\"Divides city bounds into squares.\n Returns list of square centers.\n \"\"\"\n dimensions = square_dimensions(bounds)\n width_unit = dimensions[0] / (1. * divisions)\n height_unit = dimensions[1] / (1. 
* divisions)\n\n start_point = get_nw_point(bounds)\n start_lat = start_point[0]\n start_lng = start_point[1]\n\n squares = []\n\n for x in xrange(divisions):\n lng = start_lng + (x + 0.5) * width_unit\n for y in xrange(divisions):\n lat = start_lat - (y + 0.5) * height_unit\n squares.append((lat, lng))\n return squares\n" }, { "alpha_fraction": 0.73046875, "alphanum_fraction": 0.73828125, "avg_line_length": 16, "blob_id": "b08a413b636bd9a2043e1fac050db4f0ecc504a2", "content_id": "0fa917f57e0bb235c30eb20bb78c3a8edf304a8f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 256, "license_type": "no_license", "max_line_length": 49, "num_lines": 15, "path": "/bin/python_manage_exec", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nCMD=$1\n\nHOME=\"/home/citypulse.devel\"\nPROJECT_ROOT=\"$HOME/citypulse/app\"\nDJANGO_SETTINGS_MODULE=\"citypulse.settings.devel\"\n\n. \"$HOME/citypulse/bin/activate\"\ncd \"$HOME/citypulse/app\"\n\nshift 1\nARGUMENTS=$*\n\nexec python manage.py $CMD $ARGUMENTS \n" }, { "alpha_fraction": 0.5914552807807922, "alphanum_fraction": 0.6201602220535278, "avg_line_length": 29.5510196685791, "blob_id": "1ddfa3c8e2a7c2a4aaa6021d17277167bd5ab1a6", "content_id": "382326777d26482d63463dd1d266bf86806e8b33", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1498, "license_type": "no_license", "max_line_length": 113, "num_lines": 49, "path": "/citypulse/clustering/visualise.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "import math\nimport random\n\nfrom Tkinter import Tk, Canvas, mainloop\n\nfrom citypulse.clustering.core import do_clustering, normalise_data, \\\n denormalise_point\nfrom clustering_algs import Hierarchical, KMeans\n\n\nSIZE = (600, 400)\n\n\ndef load_data():\n from citypulse.fsq.models import FSVenue\n from citypulse.settings.mongoconnection import connect\n connect()\n return FSVenue.objects.values_list('location')\n\ndef drawcircle(canv,x,y,rad):\n canv.create_oval(x-rad, y-rad, x+rad, y+rad, width=0, fill='blue')\n\ndef draw_data(points, data):\n master = Tk()\n w = Canvas(master, width=SIZE[0]*2, height=SIZE[1])\n w.pack()\n maxx, minx, maxy, miny = [f(l) for l in [[p[0] for p in points], [p[1] for p in points]] for f in [max, min]]\n dx, dy = maxx - minx, maxy - miny\n for x, y, radious in data:\n drawcircle(w, (x-minx) / dx *SIZE[0]+SIZE[0],\n (y - miny) / dy * SIZE[1], math.sqrt(radious))\n for x, y in points:\n drawcircle(w, (x-minx) / dx*SIZE[0], (y-miny) / dy *SIZE[1], 2)\n mainloop()\n\n\ndef present_data():\n# points = [(random.random(), random.random()) for _ in xrange(10000)]\n points = load_data()\n# do_clustering(points)\n points = normalise_data(points)\n test_clusterers = [Hierarchical(t) for t in [0.5, 0.8, 1.1, 1.15, 1.151]] + \\\n [KMeans(k) for k in [5, 10, 15, 25]]\n for cl in test_clusterers:\n data = cl.cluster(points)\n draw_data(points, data)\n\ndef main():\n present_data()\n\n" }, { "alpha_fraction": 0.6047130227088928, "alphanum_fraction": 0.6151653528213501, "avg_line_length": 24.543689727783203, "blob_id": "adb1948bb643394a79c85e8651ab727f7e8fbb70", "content_id": "5bbaf5fdf864557a68958150783f81849f537241", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5262, "license_type": "no_license", "max_line_length": 86, "num_lines": 206, "path": "/citypulse/settings/defaults.py", "repo_name": 
"10clouds/citypulse", "src_encoding": "UTF-8", "text": "import os\nimport tempfile\n\n# connect to mongo using settings defined in mongoconnection module\n# - this way it is completely independent of django's settings`\nfrom mongoconnection import connect\nconnect()\n\nTWEET_CRAWLER_URL = \"http://localhost:8081/\"\nTWEET_CRAWLER_TIMEOUT = 1\n\nDEBUG = True\nTEMPLATE_DEBUG = DEBUG\nJS_DEBUG = DEBUG\n\n_tempdir = tempfile.tempdir or '/tmp'\nROOT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))\n\nADMINS = ()\nMANAGERS = ADMINS\nDATABASES = {}\n\nTIME_ZONE = 'Europe/Warsaw'\nLANGUAGE_CODE = 'en-us'\nSITE_ID = 1\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\n\nMEDIA_ROOT = ''\nMEDIA_URL = ''\nSTATIC_ROOT = os.path.join(ROOT_DIR, '..', 'collected_static')\n\nSTATIC_URL = '/static/'\n\n# Additional locations of static files\nSTATICFILES_DIRS = (\n os.path.join(ROOT_DIR, '..', 'static'),\n)\n\n# List of finder classes that know how to find static files in\n# various locations.\nSTATICFILES_FINDERS = (\n 'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder'\n)\n\n# Make this unique, and don't share it with anybody.\nSECRET_KEY = ''\n\n# List of callables that know how to import templates from various sources.\nTEMPLATE_LOADERS = (\n 'django.template.loaders.filesystem.Loader',\n 'django.template.loaders.app_directories.Loader',\n 'django.template.loaders.eggs.Loader',\n)\n\nMIDDLEWARE_CLASSES = (\n 'django.middleware.common.CommonMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n)\n\nROOT_URLCONF = 'citypulse.urls'\n\n# Python dotted path to the WSGI application used by Django's runserver.\nWSGI_APPLICATION = 'citypulse.wsgi.application'\n\nTEMPLATE_DIRS = (\n \"templates\",\n os.path.join(ROOT_DIR, '..', 'templates'),\n)\n\nCACHES = {\n 'default': {\n 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',\n 'LOCATION': os.path.join(_tempdir, 'citypulse__file_based_cache'),\n }\n}\nSESSION_ENGINE = 'django.contrib.sessions.backends.cache'\n\nINSTALLED_APPS = (\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.sites',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'django.contrib.admin',\n\n 'pipeline',\n 'south',\n 'bootstrap',\n\n 'citypulse.main',\n 'citypulse.clustering',\n 'citypulse.api',\n 'citypulse.fbcrawler',\n)\n\nSOUTH_TESTS_MIGRATE = False\n\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'filters': {\n 'require_debug_false': {\n '()': 'django.utils.log.RequireDebugFalse'\n }\n },\n 'handlers': {\n 'mail_admins': {\n 'level': 'ERROR',\n 'filters': ['require_debug_false'],\n 'class': 'django.utils.log.AdminEmailHandler'\n }\n },\n 'loggers': {\n 'django.request': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': True,\n },\n }\n}\n\nPIPELINE_CSS = {\n 'bootstrap': {\n 'source_filenames': (\n 'less/bootstrap/bootstrap.less',\n 'less/base.less',\n ),\n 'output_filename': 'css/bootstrap.css',\n 'extra_context': {\n 'rel': 'stylesheet/less',\n },\n },\n 'bootstrap-responsive': {\n 'source_filenames': (\n 'less/bootstrap/responsive.less',\n ),\n 'output_filename': 'css/bootstrap-responsive.css',\n 'extra_context': {\n 'rel': 'stylesheet/less',\n },\n 
},\n\n}\n\nPIPELINE_JS = {\n 'main': {\n 'source_filenames': (\n 'js/main/init.js',\n 'js/maps/models.coffee',\n 'js/maps/views.coffee',\n 'js/markerclusterer.js',\n 'js/styledmarker.js',\n 'js/maps/controller.coffee',\n 'js/main/initcontroller.js',\n ),\n 'output_filename': 'js/main.js'\n },\n 'core': {\n 'source_filenames': (\n 'js/jquery-1.7.2.js',\n 'js/ejs.js',\n 'js/view.js',\n 'js/underscore.js',\n 'js/json2.js',\n 'js/backbone.js',\n 'js/bootstrap.js',\n 'js/init.js',\n 'js/markerclusterer.js',\n ),\n 'output_filename': 'js/core.min.js',\n },\n 'less': {\n 'source_filenames': (\n 'js/less-1.3.0.js',\n ),\n 'output_filename': 'js/less.min.js',\n },\n}\n\nPIPELINE = not DEBUG\nif PIPELINE:\n STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'\n\nPIPELINE_COMPILERS = (\n 'pipeline.compilers.coffee.CoffeeScriptCompiler',\n 'pipeline.compilers.less.LessCompiler',\n)\nPIPELINE_LESS_BINARY = \"lessc\"\nPIPELINE_YUI_BINARY = os.path.join(ROOT_DIR, '..', 'bin', 'yuicompressor.sh')\nPIPELINE_COFFEE_SCRIPT_BINARY = os.path.join(ROOT_DIR, '..', 'bin', 'coffeefinder.sh')\n\nPIPELINE_TEMPLATE_FUNC = 'new EJS'\nPIPELINE_TEMPLATE_NAMESPACE = 'window.Template'\nPIPELINE_TEMPLATE_EXT = '.ejs'\n\n\nFACEBOOK_API_KEY = '277478075671648'\nFACEBOOK_API_SECRET = 'a2a3e31634e0f62b9e618c398ce10033'\n" }, { "alpha_fraction": 0.5730937123298645, "alphanum_fraction": 0.5818358659744263, "avg_line_length": 24.395061492919922, "blob_id": "2c4a513ff84adc296b3ca1cd2309490c8d9d7ce0", "content_id": "cf4825ad6251e411c7c074aa11ceedf6fc3274a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2059, "license_type": "no_license", "max_line_length": 69, "num_lines": 81, "path": "/citypulse/clustering/clustering_algs.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from itertools import izip, groupby\n\nimport numpy as np\nfrom scipy.cluster.vq import kmeans\nfrom scipy.cluster.hierarchy import fclusterdata\n\n\n\ndef dist(ax, ay, bx, by):\n return (ax-bx)**2 + (ay-by)**2\n\ndef closest(centroids, p):\n dists = [dist(centr[0], centr[1], *p) for centr in centroids]\n mind = min(dists)\n for i, pdist in enumerate(dists):\n if mind == pdist:\n return centroids[i]\n return None\n\ndef calc_closest(centroids, data):\n d = {}\n for p in data:\n clost_c = closest(centroids, p)\n d[clost_c] = d.setdefault(clost_c, 0) + 1\n return d.items()\n\n\nclass Clustering(object):\n\n def __init__(self):\n pass\n\n def cluster(self, points):\n data = np.array(points)\n return [c + (w,) for c, w in self._cluster(data)]\n\n\nclass KMeans(Clustering):\n\n DEF_K = 15\n DEF_ITERS = 30\n\n def __init__(self, k=None, iters=None):\n self.k = k or KMeans.DEF_K\n self.iters = iters or KMeans.DEF_ITERS\n\n def _cluster(self, points, k=None, iters=None):\n k = k or self.k\n iters = iters or self.iters\n centroids = kmeans(points, k, iter=iters)[0]\n centroids = [(c[0], c[1]) for c in centroids]\n return calc_closest(centroids, points)\n \n\ndef group_into_clusters(points, clusters_id):\n key_fun = lambda x: x[1]\n data = sorted(zip(points, clusters_id), key=key_fun)\n return [[x[0] for x in g] for k, g in groupby(data, key=key_fun)]\n\ndef calc_centroid(points):\n def calc_avg(l):\n if l:\n return sum(l) / len(l)\n else:\n return 0\n return tuple([calc_avg(l) for l in izip(*points)])\n\n\nclass Hierarchical(Clustering):\n\n DEF_T = 0.1\n\n def __init__(self, t=None):\n self.t = t or Hierarchical.DEF_T\n\n def _cluster(self, points, t=None):\n t = t or self.t\n 
tmpc = fclusterdata(points, t)\n clusters = group_into_clusters(points, tmpc)\n return [(calc_centroid(cluster), len(cluster))\n for cluster in clusters]\n\n\n" }, { "alpha_fraction": 0.5794481635093689, "alphanum_fraction": 0.5946717262268066, "avg_line_length": 31.84375, "blob_id": "e371d569cde3277ff35e453251f0a6b763b40845", "content_id": "69b0de84e3fbbd23406219d391e81c5aac512523", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1051, "license_type": "no_license", "max_line_length": 91, "num_lines": 32, "path": "/citypulse/kulturalnie/kulturalnie/spiders/kulturalnie_spider.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from scrapy.contrib.spiders import Rule, CrawlSpider\nfrom scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor\nfrom scrapy.selector import HtmlXPathSelector\n\nfrom kulturalnie.items import EventItem\n\nclass KulturalnieSpider(CrawlSpider):\n name = \"kulturalnie\"\n allowed_domains = [\"kulturalnie.waw.pl\"]\n start_urls = [\n \"http://kulturalnie.waw.pl/wydarzenia/2012/04/15/\",\n ]\n\n rules = (\n Rule(\n SgmlLinkExtractor(\n allow=\"/wydarzenia/2012/04/15/\\d+/$\",\n ),\n follow=True,\n callback='parse_event'\n ),\n )\n\n def parse_event(self, response):\n self.log('Hi, this is an event page! %s' % response.url)\n\n # hxs = HtmlXPathSelector(response)\n # item = EventItem()\n # item['id'] = hxs.select('//td[@id=\"item_id\"]/text()').re(r'ID: (\\d+)')\n # item['name'] = hxs.select('//td[@id=\"item_name\"]/text()').extract()\n # item['description'] = hxs.select('//td[@id=\"item_description\"]/text()').extract()\n # return item\n" }, { "alpha_fraction": 0.6006028652191162, "alphanum_fraction": 0.6058779358863831, "avg_line_length": 30.571428298950195, "blob_id": "a7fd037a6091ec3e721f5bad4b27433b11e8fddb", "content_id": "435a5ad8c8d2aca7be83549104f2eb2696801009", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1327, "license_type": "no_license", "max_line_length": 73, "num_lines": 42, "path": "/citypulse/api/pulseresource.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "import json\n\nfrom tastypie import fields\n\nfrom citypulse.api.resources import BaseResource\nfrom citypulse.main.models import Pulse\n\n\nclass PulseResource(BaseResource):\n title = fields.CharField(attribute='title', null=True)\n url = fields.CharField(attribute='url', null=True)\n data = fields.DictField(attribute='data', null=True)\n\n # list with geo [lat, lng]\n location = fields.ListField(attribute='location')\n created = fields.DateTimeField(attribute='timestamp_created')\n added = fields.DateTimeField(attribute='timestamp_added')\n\n class Meta:\n resource_name = 'pulses'\n object_class = Pulse\n\n #----------------------------------------------\n # CUSTOM METHODS\n #----------------------------------------------\n\n def get_object_list(self, request):\n \"\"\"Method getting a list of objects.\n\n Example filters:\n * getting from within box:\n ``?format=json&location__within_box=[[1, 50], [60, 56]]``\n \"\"\"\n filters = self.extract_filters(request)\n all_objs = Pulse.objects().filter(typename='twitter')\n\n return self.apply_filters(all_objs, filters)\n\n def get_object(self, pk, request=None):\n \"\"\"Method getting single EventAggregate object by its pk (id)\n \"\"\"\n return Pulse.objects.get(pk=pk)\n\n" }, { "alpha_fraction": 0.5810645222663879, "alphanum_fraction": 0.5837625861167908, "avg_line_length": 
30.8359375, "blob_id": "84b47edb2196421c932915836eaaccce37a34e06", "content_id": "9db3c35390b6e378ea35a18bc83ae178bb5cb46b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4077, "license_type": "no_license", "max_line_length": 77, "num_lines": 128, "path": "/citypulse/api/resources.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "import json\nfrom tastypie.resources import Resource\nfrom tastypie import fields\n\nfrom citypulse.main.models import EventAggregate\n\n\nclass BaseResource(Resource):\n def __init__(self):\n super(BaseResource, self).__init__()\n\n # DICT COMPREHENSION YO\n self.fields_attributes = {f: v.attribute for f, v in\n self.fields.iteritems()}\n\n def extract_filters(self, request):\n \"\"\"Return filters with their values from a request.\n \"\"\"\n selected_filters = {}\n\n for filt, val in request.GET.iteritems():\n filtered_field, sep, filt = self.filtered_field(filt)\n if filtered_field:\n # custom rules for filters that require it\n if filtered_field == \"location\":\n # JSON loads to get list instead of string\n val = json.loads(val)\n\n selected_filters[filtered_field + sep + filt] = val\n\n return selected_filters\n\n def filtered_field(self, name):\n \"\"\"Return if this filter name is one that we can filter.\n \"\"\"\n # using __ to pass arguments directly to QuerySet filter method\n basename, sep, filt = name.partition(\"__\")\n return self.fields_attributes.get(basename), sep, filt\n\n def apply_filters(self, result, filters):\n \"\"\"Apply filters to resulting QuerySet and return its modified\n version.\n \"\"\"\n return result.filter(**filters)\n\n #----------------------------------------------\n # TASTYPIE'S METHODS\n #----------------------------------------------\n\n def get_resource_uri(self, bundle_or_obj):\n kwargs = {\n 'resource_name': self._meta.resource_name,\n 'pk': bundle_or_obj.obj.pk\n }\n\n if self._meta.api_name is not None:\n kwargs['api_name'] = self._meta.api_name\n\n return self._build_reverse_url(\"api_dispatch_detail\", kwargs=kwargs)\n\n def obj_get_list(self, request=None, **kwargs):\n return self.get_object_list(request)\n\n def obj_get(self, request=None, **kwargs):\n return self.get_object(kwargs['pk'], request)\n\n def obj_create(self, bundle, request=None, **kwargs):\n return {}\n\n def obj_update(self, bundle, request=None, **kwargs):\n pass\n\n def obj_delete_list(self, request=None, **kwargs):\n pass\n\n def obj_delete(self, request=None, **kwargs):\n pass\n\n def rollback(self, bundles):\n pass\n\n\nclass EventAggregateResource(BaseResource):\n \"\"\"\n API that will be used for drawing actual data on the frontend site\n\n Should take params like: lat, lng, radius, zoom_level, timestamp\n\n Will be available at:\n http://localhost:8000/api/events/?format=json\n\n :warning: Can't use tastypie's filtering as we don't make use of Django's\n models. 
That's why we must create custom filtering method.\n \"\"\"\n\n title = fields.CharField(attribute='title', null=True)\n url = fields.CharField(attribute='url', null=True)\n image_url = fields.CharField(attribute='image_url', null=True)\n\n # list with geo [lat, lng]\n location = fields.ListField(attribute='location')\n radius = fields.FloatField(attribute='radius', null=True)\n ts = fields.DateTimeField(attribute='timestamp')\n\n class Meta:\n resource_name = 'events'\n object_class = EventAggregate\n\n #----------------------------------------------\n # CUSTOM METHODS\n #----------------------------------------------\n\n def get_object_list(self, request):\n \"\"\"Method getting a list of objects.\n\n Example filters:\n * getting from within box:\n ``?format=json&location__within_box=[[1, 50], [60, 56]]``\n \"\"\"\n filters = self.extract_filters(request)\n all_objs = EventAggregate.objects()\n\n return self.apply_filters(all_objs, filters)\n\n def get_object(self, pk, request=None):\n \"\"\"Method getting single EventAggregate object by its pk (id)\n \"\"\"\n return EventAggregate.objects.get(pk=pk)\n\n\n" }, { "alpha_fraction": 0.6206225752830505, "alphanum_fraction": 0.6206225752830505, "avg_line_length": 26.052631378173828, "blob_id": "1182dd3451e464b44a781de7c0148c91e94a2634", "content_id": "82ccd685311eada87a77a9a877707b55ae77083b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1028, "license_type": "no_license", "max_line_length": 71, "num_lines": 38, "path": "/citypulse/fbcrawler/models.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from mongoengine import Document, StringField, GeoPointField\nfrom mongoengine.fields import IntField, DateTimeField, BooleanField, \\\n ReferenceField\nfrom citypulse.main.models import Event\n\n\nclass FBEvent(Document):\n fbid = IntField()\n name = StringField()\n location = GeoPointField()\n start_time = DateTimeField()\n end_time = DateTimeField()\n attendant_num = IntField()\n last_crawl_time = DateTimeField()\n add_time = DateTimeField()\n description = StringField()\n event_added = BooleanField(default=False)\n attendance_pulses_added = BooleanField(default=False)\n event = ReferenceField(Event)\n meta = {\n 'indexes': [{\n 'fields': ['fbid'],\n 'unique': True,\n }]\n }\n\nclass FBPlace(Document):\n fbid = IntField()\n name = StringField()\n location = GeoPointField()\n last_crawl_time = DateTimeField()\n add_time = DateTimeField()\n meta = {\n 'indexes': [{\n 'fields': ['fbid'],\n 'unique': True,\n }]\n }\n" }, { "alpha_fraction": 0.6934235692024231, "alphanum_fraction": 0.698259174823761, "avg_line_length": 34.620689392089844, "blob_id": "f9797e10658b2cd0442dd2a05b8dfdd1a3d9a912", "content_id": "6f5ee611f950707062ed32317d64873fedc642b5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1034, "license_type": "no_license", "max_line_length": 75, "num_lines": 29, "path": "/citypulse/fsq/resources.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "import json\nfrom tastypie import fields\n\nfrom citypulse.api.resources import BaseResource\nfrom citypulse.fsq.models import FSVenue\n\n\nclass FSVenueResource(BaseResource):\n name = fields.CharField(attribute=\"name\")\n url = fields.CharField(attribute=\"url\", null=True)\n image_url = fields.CharField(attribute=\"image_url\", null=True)\n location = fields.ListField(attribute=\"location\")\n people = 
fields.IntegerField(attribute=\"people\", default=0)\n checkins = fields.IntegerField(attribute=\"checkins\", default=0)\n\n class Meta:\n resource_name = \"venues\"\n object_class = FSVenue\n\n def get_object_list(self, request):\n filters = self.extract_filters(request)\n all_objs = FSVenue.objects()\n # Filter venues with small number of checkins.\n # TODO: Configuration?\n all_objs = all_objs.filter(full_data__stats__checkinsCount__gt=500)\n return self.apply_filters(all_objs, filters)\n\n def get_object(self, pk, request=None):\n return FSVenue.objects.get(pk=pk)\n\n" }, { "alpha_fraction": 0.6321406364440918, "alphanum_fraction": 0.6503452658653259, "avg_line_length": 23.875, "blob_id": "14916b7762d78934647675437c5ebec7fd64f8c3", "content_id": "c7728134d828b543bba392b0223b979b4fc5d373", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1593, "license_type": "no_license", "max_line_length": 69, "num_lines": 64, "path": "/citypulse/clustering/core.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "import datetime\n\nfrom itertools import izip\n\nfrom citypulse.main.models import EventAggregate, Pulse\nfrom citypulse.clustering.clustering_algs import \\\n KMeans, Hierarchical\n\n\nTIMEDELTA = datetime.timedelta(seconds=60*15)\n\nKMEANS_PARAMS = [{'k': 40}, {'k': 20}, {'k': 10}]\nHIERARCH_PARAMS = [{'t': 1.15}, {'t': 1.}, {'t': 0.8}]\nCLUSTERER_CLS = KMeans\nCLUSTERER_PARAMS = KMEANS_PARAMS\n\nZOOM_LEVELS = range(3)\n\n\ndef normalise_data(points):\n return [(x / 180., y / 90.) for x, y in points]\n\ndef denormalise_point(x, y):\n return x * 180., y * 90.\n\n\ndef save_as_EventAggregate(zl, cluster_center):\n ea = EventAggregate(timestamp=datetime.datetime.now(),\n radius=cluster_center[2],\n zoom_level=zl,\n location=denormalise_point(*cluster_center[:2])\n )\n ea.save()\n\n\ndef do_clustering_at_zoom(points, **params):\n return get_clusterer().cluster(points)\n\ndef do_clustering(points):\n if not points:\n return\n data = normalise_data(points)\n for zl, params in izip(ZOOM_LEVELS, CLUSTERER_PARAMS):\n cls = do_clustering_at_zoom(data, **params)\n for cluster_center in cls:\n save_as_EventAggregate(zl, cluster_center)\n\n\ndef do_cron_cluster():\n points = Pulse.objects.filter(\n timestamp_added__gte=datetime.datetime.now()-TIMEDELTA).\\\n values_list('location')\n do_clustering(points)\n\n\ndef get_real_clusterer():\n return CLUSTERER_CLS()\n\nCLUSTERER = None\ndef get_clusterer():\n global CLUSTERER\n if not CLUSTERER:\n CLUSTERER = get_real_clusterer()\n return CLUSTERER\n\n" }, { "alpha_fraction": 0.45233428478240967, "alphanum_fraction": 0.46438685059547424, "avg_line_length": 36.52043533325195, "blob_id": "4c352282e27046cc25119491fb16d65d26a45028", "content_id": "f9c6a96a359f4eb4f97ed4abb7284965fd08b3ea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13773, "license_type": "no_license", "max_line_length": 124, "num_lines": 367, "path": "/citypulse/fbcrawler/crawler.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from facepy import GraphAPI\nfrom datetime import datetime\nimport dateutil.parser\n\nfrom citypulse.settings.mongoconnection import connect\nfrom citypulse.fbcrawler.models import FBEvent\nfrom citypulse.main.models import Event, Pulse\nimport requests\nimport sys\n\nconnect()\n\nclass ExtendedGraphAPI(GraphAPI):\n def ext_search(self, term=None, type=None, page=False, **options):\n SUPPORTED_TYPES = 
['post', 'user', 'page', 'event', 'group', 'place', 'checkin', 'location']\n if type not in SUPPORTED_TYPES:\n raise ValueError('Unsupported type \"%s\". Supported types are %s' % (type, ', '.join(SUPPORTED_TYPES)))\n\n options = dict({\n 'type': type,\n }, **options)\n if term is not None:\n options['q'] = term\n\n response = self._query('GET', 'search', options, page)\n\n return response\n\n def get_event_members(self, eid):\n data = {\n 'eid': eid,\n 'access_token': self.oauth_token,\n 'format': 'json',\n }\n response = requests.request('GET', 'https://api.facebook.com/method/events.getMembers', params=data)\n return self._parse(response.content)\n\n\n def _query__(self, method, path, data={}, page=False):\n \"\"\"\n Fetch an object from the Graph API and parse the output, returning a tuple where the first item\n is the object yielded by the Graph API and the second is the URL for the next page of results, or\n ``None`` if results have been exhausted.\n\n :param method: A string describing the HTTP method.\n :param url: A string describing the URL.\n :param data: A dictionary of HTTP GET parameters (for GET requests) or POST data (for POST requests).\n :param page: A boolean describing whether to return an iterator that iterates over each page of results.\n \"\"\"\n\n def load(method, url, data):\n if method in ['GET', 'DELETE']:\n print 'FETCH %s %s' % (url, data)\n response = requests.request(method, url, params=data, allow_redirects=True)\n\n if method in ['POST', 'PUT']:\n files = {}\n\n for key in data:\n if hasattr(data[key], 'read'):\n files[key] = data[key]\n\n for key in files:\n data.pop(key)\n print 'FETCH %s %s' % (url, data)\n response = requests.request(method, url, data=data, files=files)\n\n result = self._parse(response.content)\n\n try:\n next_url = result['paging']['next']\n except (KeyError, TypeError):\n next_url = None\n\n return result, next_url\n\n def paginate(method, url, data):\n while url:\n result, url = load(method, url, data)\n\n # Reset pagination parameters.\n for key in ['offset', 'until', 'since']:\n try:\n del data[key]\n except KeyError:\n pass\n\n yield result\n\n # Convert option lists to comma-separated values.\n for key in data:\n if isinstance(data[key], list) and all([isinstance(item, basestring) for item in data[key]]):\n data[key] = ','.join(data[key])\n\n url = 'https://graph.facebook.com/%s' % path\n\n if self.oauth_token:\n data['access_token'] = self.oauth_token\n\n if page:\n return paginate(method, url, data)\n else:\n return load(method, url, data)[0]\n\nclass FacebookCrawler(object):\n def __init__(self):\n self.my_access_token = 'AAAAAAITEghMBAEB4ycKYFASJGG0mLKUVBZCRE5sLEPZCZA6hf6eaZAEtzjnzrpGtueP7Fr5ZAKGbaGpjQfHfd0R9lL8fTh0VDmDZAv8TScVAZDZD'\n self.app_access_token = '277478075671648|09zBQ07viLmvpclisoJ6_enXRb8'\n self.user_access_token = 'AAAD8XWYiJGABAK27T3azGEI3tM2qHPolEWBIecQGOxZAevx2oh47zu2DAaVb7QZC3lPWzvW3P5HBLa0DrZATWlIimuFIix4ldhw2t1pGgZDZD'\n self.graph = ExtendedGraphAPI(self.app_access_token)\n #self.graph = ExtendedGraphAPI(self.app_access_token)\n\n def crawl_places(self, position, distance):\n (x, y) = position\n center = ','.join([str(x), str(y)])\n result = self.graph.ext_search(type='place',\n page=True,\n center=center,\n distance=str(distance),\n )\n for r in result:\n data = r['data']\n for d in data:\n x = d['location']['latitude']\n y = d['location']['longitude']\n short_data = {\n 'id': d['id'],\n 'name': d['name'],\n 'location': (x, y),\n }\n #print short_data\n yield short_data\n\n def crawl_objects(self, 
object_type, position, distance,\n by_place=False, use_keywords=True, custom_keywords=[], graph=None):\n if graph is None:\n graph = self.graph\n\n if by_place:\n places_data = self.crawl_places(position, distance)\n for pd in places_data:\n result = graph.ext_search(\n type=object_type,\n page=True,\n place=pd['id'],\n )\n for r in result:\n data = r['data']\n for d in data:\n yield d\n else:\n (x, y) = position\n center = ','.join([str(x), str(y)])\n #or queries do not work?\n if use_keywords:\n keywords = custom_keywords\n else:\n keywords = [None]\n for keyword in keywords:\n #print keyword\n result = self.graph.ext_search(\n term=keyword,\n type=object_type,\n page=True,\n center=center,\n distance=str(distance),\n #since=datetime.now()\n )\n for r in result:\n data = r['data']\n for d in data:\n yield d\n\n def in_boundary(self, position, boundary_position, dist):\n #TODO: this is just stupid\n if position is None:\n return False\n ratio = 360.0 / 42000000.0\n x, y = position\n bx, by = boundary_position\n delta = ratio * dist\n #TODO:\n return (bx - delta <= x\n and x <= bx + delta\n and by - delta <= y\n and y <= by + delta)\n\n\n def crawl_events(self, position, distance, custom_keywords=None):\n crawl_time = datetime.now()\n graph = ExtendedGraphAPI(self.app_access_token)\n data = self.crawl_objects('event', position, distance, by_place=False, custom_keywords=custom_keywords, graph=graph)\n for d in data:\n evd = {\n 'id': d['id'],\n 'start_time':d['start_time'],\n 'end_time':d['end_time'],\n 'name': d['name'],\n }\n q = FBEvent.objects(fbid=evd['id'])\n if len(q) == 1:\n #print 'found in db %s' % evd['id']\n ev = q[0]\n evd['location'] = tuple(ev.location) if ev.location is not None else None\n if self.in_boundary(evd['location'], position, distance):\n evd['model'] = ev\n yield evd\n else:\n result = self.graph.get(path=evd['id'])\n #print result\n try:\n x = result['venue']['latitude']\n y = result['venue']['longitude']\n event_position = (x, y)\n if not self.in_boundary(event_position, position, distance):\n event_position = None\n except Exception:\n event_position = None\n\n ev = FBEvent()\n ev.fbid = evd['id']\n ev.name = evd['name']\n\n ev.start_time = dateutil.parser.parse(evd['start_time'])\n ev.end_time = dateutil.parser.parse(evd['end_time'])\n ev.add_time = datetime.now()\n ev.last_crawl_time = crawl_time\n ev.attendant_num = 0\n\n if event_position is not None:\n evd['location'] = event_position\n ev.location = evd['location']\n ev.save()\n evd['model'] = ev\n yield evd\n else:\n #also save not matchin elements\n ev.save()\n\n\n def crawl_checkins(self, position, distance):\n graph = ExtendedGraphAPI(self.my_access_token)\n data = self.crawl_objects('checkin', position, distance, by_place=False, use_keywords=False, graph=graph)\n for d in data:\n position = None\n place_id = None\n try:\n x = d['place']['location']['latitude']\n y = d['place']['location']['longitude']\n place_id = d['place']['id']\n position = (x, y)\n except Exception:\n pass\n chd = {\n 'id': d['id'],\n 'location': position,\n 'place_id': place_id,\n }\n yield chd\n\n def crawl_locations(self, position, distance):\n places_data = self.crawl_places(position, distance)\n for pd in places_data:\n print 'got pd = %s' % pd\n result = self.graph.ext_search(type='location',\n page=True,\n place=pd['id'],\n )\n for r in result:\n for d in r['data']:\n yield d\n\n def run(self):\n self.event_run()\n #self.checkin_run()\n\n def event_run(self):\n keywords = ['warszawa', 'warsaw', 'warszawie', 'waw', 
'hackwaw',\n 'stodola', 'hydrozagadka', 'palladium', 'proxima',\n 'fabryka', 'trzciny', 'sen', 'pszczoly', 'plan', 'arkadia', 'tarasy',\n 'sala', 'kongresowa', 'dom', 'kultury',\n 'festiwal', 'koncert', 'noc', 'impreza', 'party', 'mecz', 'manifestacja',\n 'prezentacja', 'wystawa', 'wystawy', 'projekcja', 'dyskusja', 'debata', 'spotkanie',\n 'w', 'na', 'o', 'we', 'i', 'a', '@']\n event_iter = self.crawl_events((52.228641, 21.015558), 15000, custom_keywords=keywords)\n\n crawl_time = datetime.now()\n\n sys.stdout.write('crawling events\\n')\n sys.stdout.flush()\n\n for evd in event_iter:\n fbev = evd['model']\n if not fbev.event_added:\n ev = Event()\n ev.title = fbev.name\n ev.typename = 'facebook_event'\n ev.location = fbev.location\n ev.start_time = fbev.start_time\n ev.end_time = fbev.end_time\n ev.description = fbev.description\n ev.image_url = None\n ev.url = 'http://www.facebook.com/event/%s/' % fbev.fbid\n ev.save()\n fbev.event_added = True\n fbev.event = ev\n fbev.save()\n msg = 'added %s, id=%s' % (ev.title, fbev.fbid)\n sys.stdout.write('\\n%s\\n' % msg)\n sys.stdout.flush()\n else:\n sys.stdout.write('.')\n sys.stdout.flush()\n sys.stdout.write('\\n')\n sys.stdout.flush()\n\n sys.stdout.write('crawling event attendance\\n')\n sys.stdout.flush()\n\n q = FBEvent.objects(attendance_pulses_added=False)\n for fbev in q:\n event_url = 'http://www.facebook.com/event/%s/' % fbev.fbid\n ev = fbev.event\n if ev is None:\n ev_q = Event.objects(url=event_url)\n if len(ev_q) == 1:\n ev = ev_q[0]\n if ev:\n result = self.graph.get_event_members(fbev.fbid)\n if 'attending' in result:\n for fbid in result['attending']:\n p = Pulse()\n p.title = fbev.name\n p.typename = 'facebook_attendance'\n p.event = ev\n p.data = {'uid': fbid, 'eid': fbev.fbid}\n p.timestamp_added = datetime.now()\n p.timestamp_created = fbev.start_time\n p.location = fbev.location\n p.url = event_url\n p.save()\n sys.stdout.write('.')\n sys.stdout.flush()\n fbev.attendance_pulses_added = True\n if fbev.event is None:\n fbev.event = ev\n fbev.save()\n sys.stdout.write('\\n')\n sys.stdout.flush()\n\n def checkin_run(self):\n checkin_iter = self.crawl_checkins((52.228641, 21.015558), 15000)\n for chd in checkin_iter:\n print chd\n\n\n\n\n\n\n #iter = self.crawl_events((52.228641, 21.015558), 15000)\n #iter = self.crawl_places((52.228641, 21.015558), 15000)\n #iter = self.crawl_locations((52.228641, 21.015558), 15000)\n #iter = self.crawl_events((52.228641, 21.015558), 15000)\n #iter = self.crawl_places((52.228641, 21.015558), 20000)\n #for obj in iter:\n # print obj\n\n\n\n" }, { "alpha_fraction": 0.6454652547836304, "alphanum_fraction": 0.6478209495544434, "avg_line_length": 20.225000381469727, "blob_id": "1510de81cac3577aec355f17e1d97f4e72fe2bbf", "content_id": "18a72d8f41165d14e60699c3c370fa8290df10f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 849, "license_type": "no_license", "max_line_length": 62, "num_lines": 40, "path": "/citypulse/settings/devel.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from defaults import *\n\nDEBUG = True\nTEMPLATE_DEBUG = DEBUG\nJS_DEBUG = DEBUG\nPIPELINE = True\n\nDATABASES.update({\n 'default': {\n 'ENGINE': 'django.db.backends.mysql',\n 'NAME': 'citypulse_devel',\n 'PORT': '',\n 'USER': 'citypulse_devel',\n 'PASSWORD': 'test12',\n 'HOST': '',\n 'OPTIONS': {}\n },\n})\n\nKEY_PREFIX = 'stable_citypulse'\n\nMIDDLEWARE_CLASSES = tuple(list(MIDDLEWARE_CLASSES) + [\n 
'pipeline.middleware.MinifyHTMLMiddleware',\n])\n\n# Production Mail settings\nSERVER_EMAIL = DEFAULT_FROM_EMAIL = 'noreply@citypulse'\nEMAIL_HOST = 'localhost'\nEMAIL_PORT = 25\nEMAIL_USE_TLS = False\nEMAIL_HOST_USER = None\nEMAIL_HOST_PASSWORD = None\n\n# django sentry\nSENTRY_DSN = None\nINSTALLED_APPS = INSTALLED_APPS + (\n 'raven.contrib.django',\n)\n\nSTATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'\n" }, { "alpha_fraction": 0.5820635557174683, "alphanum_fraction": 0.5855463743209839, "avg_line_length": 34.890625, "blob_id": "b421196bf833ce8a79dfec3d6d1ecf2f209ffa83", "content_id": "03f9379d296432f71244e47bcc08742f9ec1317f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2297, "license_type": "no_license", "max_line_length": 85, "num_lines": 64, "path": "/citypulse/main/management/commands/executescript.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nimport time\nimport logging\nimport optparse\n\nfrom django.core.management.base import BaseCommand, CommandError\n\nlog = logging.getLogger('executescript.management.commands.executescript')\n\n\nclass Command(BaseCommand):\n \"\"\"Execute any python script with :func:`main` function. This includes\n Django environment setup.\n \"\"\"\n\n option_list = BaseCommand.option_list + (\n optparse.make_option('-s', '--script', dest='script_path',\n metavar=\"FILE\",\n help=\"Execute given script with django command context\"),\n optparse.make_option('-c', '--callback-name', dest='callback_name',\n help=\"Find and execute callback with given name\"),\n )\n\n def handle(self, *args, **options):\n script = options.get('script_path', None)\n callback = options.get('callback_name', None)\n if not script and not callback:\n raise CommandError('either \"script\" or \"callback\" parameter is required')\n\n if script:\n if not script.endswith('.py'):\n script += '.py'\n if not os.path.isfile(script):\n raise CommandError('Script \"%s\" does not exist' % script)\n\n sys.path.insert(0, os.path.dirname(script))\n\n mod_name, ext = os.path.splitext(os.path.basename(script))\n mod = __import__(mod_name, globals(), locals(), [], -1)\n mod_callback = getattr(mod, options.get('callback_name') or 'main')\n\n if callback:\n function_name = callback.split('.')[-1]\n mod_name = '.'.join(callback.split('.')[:-1])\n mod = __import__(mod_name, globals(), locals(), [function_name], -1)\n mod_callback = getattr(mod, function_name, None)\n\n if mod_callback is None:\n raise CommandError('\"%s\" script does not contain \"%s\" function' % \\\n (script, options['callback_name']))\n\n start_time = time.time()\n try:\n mod_callback(*args)\n except Exception:\n work_time = time.time() - start_time\n log.exception(\"Script fail: %s (%.2fsec)\", script, work_time)\n raise\n\n work_time = time.time() - start_time\n log.info(\"Script done: %s %.2fsec\", mod_callback, work_time)\n" }, { "alpha_fraction": 0.6332931518554688, "alphanum_fraction": 0.6357056498527527, "avg_line_length": 27.586206436157227, "blob_id": "c4f79353f94a05466b3d66e5bf631e0f5c4a3896", "content_id": "88b8af0d649abd45610cc8a62d61dcaa21e82ec3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 829, "license_type": "no_license", "max_line_length": 71, "num_lines": 29, "path": "/citypulse/urls.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from django.conf import settings\nfrom 
django.conf.urls import patterns, include, url\n\nfrom django.contrib import admin\nadmin.autodiscover()\n\n\ndef bad(request):\n \"\"\" Simulates a server error \"\"\"\n 1 / 0\n\nurlpatterns = patterns('citypulse',\n\n url(r'^$', 'main.views.index', name='index'),\n url(r'^about/$', 'main.views.about', name='about'),\n url(r'^_admin/', include(admin.site.urls)),\n url(r'^api/', include('citypulse.api.urls')),\n\n (r'^bad/$', bad),\n)\n\n## In DEBUG mode, serve media files through Django.\nif settings.DEBUG:\n # Remove leading and trailing slashes so the regex matches.\n media_url = settings.MEDIA_URL.lstrip('/').rstrip('/')\n urlpatterns += patterns('',\n (r'^%s/(?P<path>.*)$' % media_url, 'django.views.static.serve',\n {'document_root': settings.MEDIA_ROOT}),\n )\n" }, { "alpha_fraction": 0.499529629945755, "alphanum_fraction": 0.5098776817321777, "avg_line_length": 32.21875, "blob_id": "e77c84c9d7178251f9e905de1bc0d3f86ea29d30", "content_id": "e985cd1b093213fc2798bdb19279cd1680efe2f9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1063, "license_type": "no_license", "max_line_length": 119, "num_lines": 32, "path": "/citypulse/main/static/js/main/initcontroller.js", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "$(function() {\n citypulse.initializeGoogleMap();\n var controller = new citypulse.Controller;\n\n setTimeout(function(){\n\n $('#hour-filter input[type=range]').toggleDisabled();\n\n $('#hour-filter input[type=checkbox]').click(function(target){\n $('#hour-filter input[type=range]').toggleDisabled();\n });\n\n $('#hour-filter input[type=range]').change(function(ev){\n\n if( $('#hour-filter input[type=checkbox]').attr('checked') === \"checked\" ){\n var startHour = parseInt($(ev.target).attr('value'));\n var endHour = startHour + 1;\n var d = new Date();\n var startDate = d.getFullYear() + \"-\" + (d.getMonth()+1) + \"-\" + d.getDate() + \" \" + startHour + \":00\";\n var endDate = d.getFullYear() + \"-\" + (d.getMonth()+1) + \"-\" + d.getDate() + \" \" + endHour + \":00\";\n\n $(\"#date-placeholder\").html(startHour + \"h\");\n controller.changeDateRange(startDate, endDate); \n }\n\n\n });\n\n },1000);\n\n\n});\n" }, { "alpha_fraction": 0.6617581248283386, "alphanum_fraction": 0.6644355058670044, "avg_line_length": 27.367088317871094, "blob_id": "b98cbe945a419a8aa92f6cc03cb294e70ddac35e", "content_id": "aba399b68c8162cde48b5f30379c74ffc6c1e431", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2241, "license_type": "no_license", "max_line_length": 75, "num_lines": 79, "path": "/citypulse/main/models.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from mongoengine import Document, DateTimeField, StringField, URLField, \\\n GeoPointField, ReferenceField, FloatField, DictField, IntField\n\n\nclass Location(Document):\n \"\"\"City location. 
The radius field is city's radius in (SOMEUNIT HERE).\n \"\"\"\n name = StringField()\n location = GeoPointField()\n radius = FloatField(min_value=0.0)\n\n\nclass Event(Document):\n \"\"\"Event document - an event gathered from crawlers.\n\n Although mongo does not enforce choices for typename field, its\n value should be one of:\n * twitter\n * 4square\n * facebook\n These all defined in TYPENAME_CHOICES set.\n \"\"\"\n TYPENAME_CHOICES = frozenset((\"4square_venue\", \"facebook_event\"))\n\n title = StringField()\n typename = StringField()\n location = GeoPointField()\n start_time = DateTimeField()\n end_time = DateTimeField()\n description = StringField()\n image_url = URLField()\n url = URLField()\n\n\nclass Pulse(Document):\n \"\"\"Pulse document - a pulse (tweet, checkin, message etc.) that can\n be linked to a particular Event, or exist on its own.\n\n Although mongo does not enforce choices for typename field, its\n value should be one of:\n * twitter\n * 4square\n * facebook\n These all defined in TYPENAME_CHOICES set.\n\n TODO description or data?\n motivation: data can be a dictionary or a EmbeddedDocument\n that can store arbitrary data.\n \"\"\"\n TYPENAME_CHOICES = frozenset((\"twitter\", \"4square\", \"facebook\"))\n\n title = StringField()\n typename = StringField()\n data = DictField()\n event = ReferenceField(Event)\n timestamp_added = DateTimeField()\n timestamp_created = DateTimeField()\n location = GeoPointField()\n url = URLField()\n\n\nclass EventAggregate(Document):\n \"\"\"An aggregate of events, within certain radius and bound to\n specified zoom level.\n\n This is the actual data sent to frontend.\n\n TODO: define slice_types (in minutes? seconds?)\n \"\"\"\n\n location = GeoPointField()\n timestamp = DateTimeField()\n slice_type = IntField()\n radius = FloatField()\n zoom_level = IntField()\n title = StringField()\n description = StringField()\n image_url = URLField()\n url = URLField()\n" }, { "alpha_fraction": 0.6600967645645142, "alphanum_fraction": 0.6624261140823364, "avg_line_length": 35.717105865478516, "blob_id": "be0a77f698a0f84fdbceeac837b2d97bcc7aa7e0", "content_id": "ecce351cbaf2fd52fe115a12b4b6861bba3af696", "detected_licenses": [], "is_generated": false, "is_vendor": true, "language": "Python", "length_bytes": 5581, "license_type": "no_license", "max_line_length": 166, "num_lines": 152, "path": "/fabfile.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from fabric import colors, api\nfrom fabric import operations\nfrom fabric.contrib.project import upload_project\nimport os\n\n\nPROJECT_NAME = 'citypulse'\nPROJECT_ROOT = '/home/citypulse.%s/citypulse/'\nDJANGO_ROOT = '%sapp' % PROJECT_ROOT\nVIRTUAL_ENV = '. 
%sbin/activate' % PROJECT_ROOT\nYUI_LOCATION = '%s/bin/yuicompressor-2.4.7.jar' % DJANGO_ROOT\nHOST = '[email protected]' # change to actual host\n\n\nDEVEL = 'citypulse.settings.devel'\nSTABLE = 'citypulse.settings.stable'\n\n\ndef provision():\n\n # we are asking for all necessery data upfront so that we can go to grab a coffee or a fast 1v1 while it's installing\n version = operations.prompt('Please specify target version stable or devel: ', validate=r'^(stable|devel)$')\n db = operations.prompt('Please specify database engine [mysql|postgresql]: ', validate=r'^(mysql|postgresql)$')\n\n db_password = 'test12'\n db_user = \"%s_%s\" % (PROJECT_NAME, version,)\n system_user = \"%s.%s\" % (PROJECT_NAME, version,)\n project_root = PROJECT_ROOT % version\n local_project_dir = os.path.abspath(os.path.dirname(__file__))\n\n with api.settings(warn_only=True):\n api.sudo('mkdir ~/.ssh')\n operations.put('~/.ssh/id_rsa.pub', '~/.ssh/authorized_keys', use_sudo=True)\n\n api.sudo('apt-get update')\n\n # install base packages\n\n # installing core stuff for a web server\n api.sudo('apt-get install -q -y python-virtualenv subversion mercurial python-all-dev curl python-flup nginx supervisor git')\n\n # install database\n # TODO make it a seperate command\n if db == 'mysql':\n\n api.sudo('echo \"mysql-server mysql-server/root_password password %s\" | debconf-set-selections' % db_password)\n api.sudo('echo \"mysql-server mysql-server/root_password_again password %s\" | debconf-set-selections' % db_password)\n api.sudo('apt-get install -q -y mysql-client mysql-server')\n\n # create database and setup user\n api.sudo(\"echo \\\"CREATE USER '%s'@'localhost' IDENTIFIED BY '%s';\\\" | mysql --password=%s\" % (db_user, db_password, db_password))\n api.sudo(\"echo \\\"GRANT ALL PRIVILEGES ON *.* TO '%s'@'localhost';\\\" | mysql --password=%s\" % (db_user, db_password))\n api.sudo(\"echo \\\"CREATE DATABASE %s DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;\\\" | mysql --password=%s\" % (db_user, db_password))\n\n elif db == 'postgresql':\n raise Exception('Not implemented')\n\n # setup user name and account\n # TODO make it a seperate command\n api.sudo('useradd %s -m' % system_user)\n api.sudo('mkdir /home/%s/.ssh' % system_user, user=system_user)\n operations.put('~/.ssh/id_rsa.pub', '/home/%s/.ssh/authorized_keys' % system_user, use_sudo=True)\n\n # setup base directory structure\n # /home/citypulse.version/log\n # /home/citypulse.version/citypulse\n\n api.sudo('mkdir -p %slog' % (project_root), user=system_user)\n api.sudo('mkdir -p %sapp' % (project_root), user=system_user)\n\n # setting up virtualenv\n api.sudo('virtualenv --no-site-packages %s' % (project_root), user=system_user)\n # TODO fetch required libraries for virtualenv\n\n # upload local version of app to remote server, you will still have to setup remote server auth keys\n # hackish solution but besides changing upload_projects code I dont see how to solve the problems of permissions\n api.sudo('chown -R %s:%s /home/%s/' % ('ubuntu', 'ubuntu', system_user))\n with api.cd(project_root):\n upload_project(local_project_dir, project_root)\n\n api.sudo('chown -R %s:%s /home/%s/' % (system_user, system_user, system_user))\n\n # TODO setup nginx config\n # TODO setup supervisor config\n # TODO restart nginx and supervisor\n pass\n\n\ndef show(msg, *args):\n print colors.cyan('==>', bold=True), msg % args\n\n\ndef update(version):\n show(colors.blue('updating %s repository' % PROJECT_NAME))\n with api.cd(DJANGO_ROOT % version):\n 
api.run('git stash')\n api.run('git pull --rebase')\n api.run('git diff')\n\n\ndef collectstatic(version):\n show(colors.blue('collecting static files'))\n with api.prefix(VIRTUAL_ENV % version):\n with api.cd(DJANGO_ROOT % version):\n with api.settings(warn_only=True):\n api.run('mkdir collected_static')\n api.run('export YUI_COMPRESSOR_LIB=%s && ./manage.py collectstatic --noinput --settings=%s.settings.%s' % (YUI_LOCATION % version, PROJECT_NAME, version))\n\n\ndef migrate(settings):\n show(colors.blue('running migration scripts'))\n with api.prefix(VIRTUAL_ENV):\n with api.cd(DJANGO_ROOT):\n api.run('./manage.py migrate --settings=%s' % settings)\n\n\ndef supervisor(service, action):\n show(colors.blue('running \"%s\" for %s daemon services'), service, action)\n with api.settings(warn_only=True):\n api.run('sudo supervisorctl %s %s' % (action, service))\n\n\ndef service(service, action):\n show(colors.blue('running \"%s\" for %s daemon services'), service, action)\n with api.settings(warn_only=True):\n api.run('sudo /etc/init.d/%s %s' % (service, action,))\n\n\ndef install_requirements():\n show(colors.blue('updating %s repository' % PROJECT_NAME))\n with api.cd(PROJECT_ROOT):\n api.run('pip install -r requirements.txt')\n api.run('pip install -r requirements-server.txt')\n\n\ndef deploy():\n update('devel')\n collectstatic('devel')\n supervisor('%s.devel' % PROJECT_NAME, 'restart')\n service('nginx', 'reload')\n\n\ndef stop():\n supervisor('devel', 'stop')\n\n\ndef start():\n supervisor('devel', 'start')\n\n\ndef restart():\n supervisor('devel', 'restart')\n" }, { "alpha_fraction": 0.5622065663337708, "alphanum_fraction": 0.565727710723877, "avg_line_length": 26.483871459960938, "blob_id": "cd9422a0a4a8cd889ffab69f62a37b22a72390e4", "content_id": "eceb318123e070a5f52ea71840057683705bd64c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 852, "license_type": "no_license", "max_line_length": 76, "num_lines": 31, "path": "/citypulse/fsq/models.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from mongoengine import (Document, StringField, GeoPointField, IntField,\n DictField)\n\n\nclass FSVenue(Document):\n vid = StringField()\n name = StringField()\n location = GeoPointField()\n people = IntField()\n full_data = DictField()\n\n def __unicode__(self):\n return u\"%s %s\" % (self.name, self.location)\n\n def url(self):\n return \"https://foursquare.com/v/%s\" % self.vid\n\n def image_url(self):\n try:\n icon_data = self.full_data[\"categories\"][0][\"icon\"]\n size = icon_data[\"sizes\"][0]\n icon = \"%s%s%s\" % (icon_data[\"prefix\"], size, icon_data[\"name\"])\n except (IndexError, KeyError):\n icon = None\n return icon\n\n def checkins(self):\n try:\n return self.full_data[\"stats\"][\"checkinsCount\"]\n except KeyError:\n return 0\n" }, { "alpha_fraction": 0.5438596606254578, "alphanum_fraction": 0.6184210777282715, "avg_line_length": 29.399999618530273, "blob_id": "a09aaa8f57ab842d70173418e3b985994ffba304", "content_id": "20d0b1bbe498eda8cd9187f9a4bece17caa7ae48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 912, "license_type": "no_license", "max_line_length": 78, "num_lines": 30, "path": "/scripts/initial_data.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from datetime import datetime\n\nfrom citypulse.main.models import *\n\n\ndef main():\n\n ## cities\n Location.objects.delete()\n\n warsaw = 
Location(name='Warsaw', location=[52.238, 21.045], radius=5)\n warsaw.save()\n\n newyork = Location(name='New York', location=[40.749, -73.971], radius=5)\n newyork.save()\n\n ## some mocked EventAggregates in Warsaw\n EventAggregate.objects.delete()\n\n tenc = EventAggregate(title='10Clouds HQ', location=[52.23988, 21.09154],\n url=\"http://10clouds.com\", timestamp=datetime.now())\n tenc.save()\n\n zebra = EventAggregate(title='Zebra tower', location=[52.22968, 21.01223],\n url=\"http://hackwaw.com\", timestamp=datetime.now())\n zebra.save()\n\n pkin = EventAggregate(title='PKiN', location=[52.23236, 21.00633],\n url=\"http://pekin.cn\", timestamp=datetime.now())\n pkin.save()\n" }, { "alpha_fraction": 0.7283236980438232, "alphanum_fraction": 0.7283236980438232, "avg_line_length": 25.615385055541992, "blob_id": "3bbeda7b70030e5e4edc99411598cebba34c90e3", "content_id": "d9e840b94df44f44a4a79aec1d7526c37eed50ea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 346, "license_type": "no_license", "max_line_length": 71, "num_lines": 13, "path": "/citypulse/main/views.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nfrom citypulse.main.models import Location\n\n\ndef index(request):\n\n locations = Location.objects.all()\n return render(request, 'main/index.html', {'locations': locations})\n\n\ndef about(request):\n locations = Location.objects.all()\n return render(request, 'main/about.html', {'locations': locations})\n" }, { "alpha_fraction": 0.7353760600090027, "alphanum_fraction": 0.7353760600090027, "avg_line_length": 26.615385055541992, "blob_id": "4ac65526c947e809d62c6b41c856a2bcd171e737", "content_id": "e3913b97994b761ace25e52c482fcebc8937a474", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 359, "license_type": "no_license", "max_line_length": 67, "num_lines": 13, "path": "/citypulse/settings/mongoconnection.py", "repo_name": "10clouds/citypulse", "src_encoding": "UTF-8", "text": "# MONGO connection\nimport mongoengine\n\n# requires anonymous access for mongo - configure user/pass later\nMONGO_DB_NAME = \"citypulse\"\n\ndef connect():\n \"\"\"Connection function. Use to globally connect to mongo within\n Python process.\n\n Connection settings are defines in this module.`\n \"\"\"\n mongoengine.connect(MONGO_DB_NAME) # global connection\n" } ]
28
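The mongoengine documents in the record above (`citypulse/main/models.py`) describe `EventAggregate` as the data actually sent to the frontend, and `scripts/initial_data.py` shows a few being created after `settings/mongoconnection.py` opens a global connection. A minimal query sketch under the same assumptions (the `citypulse` package importable as laid out above, a local MongoDB reachable via that connection helper) would be:

```python
# Hypothetical usage sketch, not part of the repository: list a few of the
# EventAggregate documents that initial_data.py inserts.
from citypulse.settings import mongoconnection
from citypulse.main.models import EventAggregate

mongoconnection.connect()            # global mongoengine connection, as defined above
for agg in EventAggregate.objects[:5]:
    print("%s at %s" % (agg.title, agg.location))
```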
erincr/CVDRisk
https://github.com/erincr/CVDRisk
adea9b41a2dd9a5e9efc8ed0aa9a1fae33ea3531
e161fb028615455cbf07796bd687066d00a1a329
46c12201bb0e0ad9d7a95a892d2fe439fc8d4c57
refs/heads/master
2020-09-30T07:38:03.949354
2019-12-13T23:39:35
2019-12-13T23:39:35
227,241,887
2
0
null
null
null
null
null
[ { "alpha_fraction": 0.6635262370109558, "alphanum_fraction": 0.6662180423736572, "avg_line_length": 38.105262756347656, "blob_id": "8323f7360003cb360df36b8eda601785d12a5090", "content_id": "87e18217d0c4e537edcec3b26416e970e6a9cf2f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 803, "license_type": "no_license", "max_line_length": 109, "num_lines": 19, "path": "/README.md", "repo_name": "erincr/CVDRisk", "src_encoding": "UTF-8", "text": "# CVD Risk\n\nClass project aiming to predict 10-year probability of cardiovascular disease, and to incorporate zip codes. \n\nIn this repo you'll find\n\n .\n ├── README.md\n | └─ This doc\n ├── PhenotypeDefinitionsAndHelpers.py\n | └─ Define comorbidities and medications of interest.\n ├── PrepareData.py\n | └─ Load data, define columns of interest, perform a train/validation/test split.\n ├── ClusterZipCodes.py, KMedioids.py\n | └─ Draw Kaplan-Meier curves, compute pairwise distances and cluster zip codes\n ├── HyperparameterSearch.py\n | └─ Run the hyperparameter search; write performance metrics to a .csv.\n └── CVDRisk\n └─ Run the full pipeline, and visualize results.\n" }, { "alpha_fraction": 0.49142390489578247, "alphanum_fraction": 0.5080316066741943, "avg_line_length": 30.401708602905273, "blob_id": "ffe17e709d448c845df35aafe68f5d2ccd7874ff", "content_id": "f646c9bdd488b9f61837ca38cd5225f462a8bee1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3673, "license_type": "no_license", "max_line_length": 95, "num_lines": 117, "path": "/code/HyperparameterSearch.py", "repo_name": "erincr/CVDRisk", "src_encoding": "UTF-8", "text": "import time\n\nimport numpy as np\n\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.ensemble import GradientBoostingClassifier\nfrom sklearn import metrics\n\n\ndef unoC(t, p, o, w):\n tt = t[t == 1]\n pt = p[t == 1]\n ot = o[t == 1]\n wt = w[t == 1]\n \n def chunked_cindex(chunk, bufsize = len(ot)):\n for start in range(0, bufsize + chunk, chunk):\n s, e = start, min(start + chunk, bufsize)\n print(s)\n oc_bool_matrix = o > ot[s:e].reshape(-1,1)\n pr_bool_matrix = p < pt[s:e].reshape(-1,1)\n both = np.logical_and(oc_bool_matrix, pr_bool_matrix)\n \n n = np.sum(both, axis=1).dot(1/wt[s:e]**2)\n d = np.sum(pr_bool_matrix, axis=1).dot(1/wt[s:e]**2)\n yield n, d\n \n info = [(n,d) for n,d in chunked_cindex(10000)]\n c = sum([x[0] for x in info])/sum([x[1] for x in info])\n print(c)\n return(c)\n\ndef get_metrics(dataset, model, columns):\n df = get_dataset(dataset)\n \n if dataset == \"train\":\n pr = model.predict_proba(df[df.Censored == 0][columns])[:, 1]\n unoc = 0\n else:\n pr = model.predict_proba(df[columns])[:, 1]\n unoc = unoC(df.AnyOutcome.values, pr, df.DaysToEventOrCensoring.values, df.IPCW.values)\n pr = pr[df.Censored == 0]\n \n \n df = df[df.Censored == 0]\n \n acc = np.mean(1 * (pr > .5) == df.AnyOutcome.values)\n aucpr = metrics.average_precision_score(df.AnyOutcome.values, pr)\n aucroc = metrics.roc_auc_score(df.AnyOutcome.values, pr)\n \n return({\"unoC_\" + dataset: unoc,\n \"acc_\" + dataset: acc,\n \"aucPR_\" + dataset: aucpr,\n \"aucROC_\" + dataset: aucroc\n })\n\ndef gb_paramsearch(DEPTH, COLUMNS, COLNAME, START = 0, ENDIX = 11):\n gbt = pd.DataFrame({\n \"trees\": range(START, 1600), \n \"columns\": COLNAME, \n \"depth\" : DEPTH,\n \"unoC_train\": 0,\n \"acc_train\": 0,\n \"aucPR_train\": 0,\n \"aucROC_train\": 0,\n \"unoC_val\": 0,\n \"acc_val\": 0,\n 
\"aucPR_val\": 0,\n \"aucROC_val\": 0})\n\n gb = GradientBoostingClassifier(\n random_state=0, \n verbose = True,\n min_samples_leaf = 5,\n max_depth = DEPTH,\n n_estimators = START,\n subsample = 1,\n learning_rate=0.1\n )\n if START > 0:\n print(\"pretraining!\")\n gb.fit(\n train.loc[tra_ix, COLUMNS], \n train.loc[tra_ix, \"AnyOutcome\"], \n sample_weight = train.loc[tra_ix, \"IPCW\"]\n )\n \n \n t0 = time.time()\n for i in range(1, ENDIX):\n if i % 10 == 0:\n print(i)\n print(time.time() - t0)\n t0 = time.time()\n \n _ = gb.set_params(n_estimators= START + 20 * i, warm_start=True) \n\n gb.fit(\n train.loc[tra_ix, COLUMNS], \n train.loc[tra_ix, \"AnyOutcome\"], \n sample_weight = train.loc[tra_ix, \"IPCW\"]\n )\n print(gb.n_estimators_)\n\n for d in [\"train\", \"val\"]:\n gc.collect()\n ms = get_metrics(d, gb, COLUMNS) #get_metrics(d, gb, COLUMNS)\n print(ms)\n for k, v in ms.items():\n gbt.loc[gbt.trees == gb.n_estimators_, k] = v\n \n gbt = gbt[~(gbt.unoC_val == 0)].reset_index(drop=True)\n previous = pd.read_csv(\"./Performance_Metrics/metric_df.csv\")\n \n gbt = pd.concat([previous, gbt])\n print(\"Finished! We have this many rows in our data frame:\", len(gbt))\n gbt.to_csv(\"./Performance_Metrics/metric_df.csv\", index=False)" }, { "alpha_fraction": 0.4304291307926178, "alphanum_fraction": 0.5702210664749146, "avg_line_length": 30.070707321166992, "blob_id": "7fbe19079c79e1a8bde7280240a63b6f7f21329c", "content_id": "6b0918ef59bd8fe50283afb43967d7b6688174f0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3076, "license_type": "no_license", "max_line_length": 92, "num_lines": 99, "path": "/code/PhenoytypeDefinitionsAndHelpers.py", "repo_name": "erincr/CVDRisk", "src_encoding": "UTF-8", "text": "import numpy as np\nimport re\nimport os\n\npath = \"\"\ndef getfile(x):\n return(os.path.join(path,x))\n\nmake_regex = lambda x: re.compile(\"|\".join(x))\n\n\nchf_icd09 = [\"(428.)\", \"(39891)\", \"(402[019]1)\",\"(404[019][13])\", \"(425[456789])\"]\nchf_icd10 = [\"(I099)\", \"(I110)\", \"(I13[02])\", \"(I255)\", \"(I42[056789])\", \"(I43.)\", \"(I50.)\"]\n\nmi_icd09 = [\"(410.)\", \"(412)\"]\nmi_icd10 = [\"(I21.)\", \"(I22.)\"]\n\nstroke_icd09 = [\"(434)\", \"(433.1)\",\"(V1254)\"]\nstroke_icd10 = [\"(I6[123456].)\"]\n\nckd_icd09 = [\"(585)\", \"(585[0123456789])\"]\nckd_icd10 = [\"(N18[1234569])\"]\n\nra_icd09 = [\"(714[0123])\"]\nra_icd10 = [\"M06[089]\", \"M05[0123456789]\"]\n\ndia_t1_icd09 = [\"(250[0123456789][139])\", \"(2500)\", \"(25000)\", \"(250)\"]\ndia_t1_icd10 = [\"(E109)\"]\n\ndia_t2_icd09 = [\"(250[0123456789][02])\"]\ndia_t2_icd10 = [\"(E119)\"]\n\nhypercholesterolemia_icd09 = [\"(2720)\"]\nhypercholesterolemia_icd10 = [\"(E7801)\"]\n\nhypertension_icd09 = [\"(401)|(401[019])\"]\nhypertension_icd10 = [\"(I10)\"]\n\nhyperlipidemia_icd09 = [\"(272[24])\"]\nhyperlipidemia_icd10 = [\"(E784)\"]\n\ntobacco_icd09 = [\"(3051)\"]\ntobacco_icd10 = [\"(F17200)\"]\n\nclass Definitions:\n def __init__(self):\n self.chf = chf_icd09 + chf_icd10\n self.chf_regx = make_regex(self.chf)\n\n self.mi = mi_icd09 + mi_icd10\n self.mi_regx = make_regex(self.mi)\n\n self.stroke = stroke_icd09 + stroke_icd10\n self.stroke_regx = make_regex(self.stroke)\n\n self.ckd = ckd_icd09 + ckd_icd10\n self.ckd_regx = make_regex(self.ckd)\n\n self.ra = ra_icd09 + ra_icd10\n self.ra_regx = make_regex(self.ra)\n\n self.dia_t1 = dia_t1_icd09 + dia_t1_icd10\n self.dia_t1_regx = make_regex(self.dia_t1)\n\n self.dia_t2 = dia_t2_icd09 + dia_t2_icd10\n 
self.dia_t2_regx = make_regex(self.dia_t2)\n\n self.hyperchol = hypercholesterolemia_icd09 + hypercholesterolemia_icd10\n self.hyperchol_regx = make_regex(self.hyperchol)\n\n self.hypertension = hypertension_icd09 + hypertension_icd10\n self.hypertension_regx = make_regex(self.hypertension)\n\n self.hyperlipidemia = hyperlipidemia_icd09 + hyperlipidemia_icd10\n self.hyperlipidemia_regx = make_regex(self.hyperlipidemia)\n\n self.tobacco = tobacco_icd09 + tobacco_icd10\n self.tobacco_regx = make_regex(self.tobacco)\n\n self.medclasses = {\n \"Insulin\" : \"682008\",\n \"Statin\": \"240608\",\n \"Sulfonylureas\": \"682020\",\n \"Thiazolidinediones\": \"682028\",\n \"ACEInhibitor\": \"243204\",\n \"BBlocker\": \"242400\",\n \"ABlocker\": \"242000\",\n \"Statin\": \"240608\",\n \"Dihydropyridines\" : \"242808\", # hypertension\n \"Antiinflammatory\" : \"840600\",\n \"Antiinflammatory2\": \"563600\",\n \"Biguanides\" : \"175735\",\n \"CalciumChannelBlockers\" : \"242892\",\n \"LoopDiuretics\" : \"402808\",\n \"AntiCoagulants\" : \"201204\",\n \"CholesterolAbsorptionInhibitors\" : \"240605\",\n \"NitrateNitrite\" : \"241208\",\n \"Cardiotonic\": \"240408\"\n }\n" }, { "alpha_fraction": 0.5741176605224609, "alphanum_fraction": 0.5872941017150879, "avg_line_length": 37.59090805053711, "blob_id": "f6ae8fbdfc4f5a6c5ed773be226b20198bcb4960", "content_id": "11a951eb6dd94a712060204ab4954a6e5dae62b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4250, "license_type": "no_license", "max_line_length": 134, "num_lines": 110, "path": "/code/ClusterZipCodes.py", "repo_name": "erincr/CVDRisk", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\nimport scipy\nimport math\n\nfrom kmedioids import cluster\n\nimport time\n\ndef ComputeZipCodeDistances(kms, distances = None, save_path = None):\n if distances is not None:\n # We have already computed the distances - don't recompute!\n distances = np.load(distances)\n return(distances)\n \n def cartesian_product(*arrays):\n la = len(arrays)\n dtype = np.result_type(*arrays)\n arr = np.empty([len(a) for a in arrays] + [la], dtype=dtype)\n for i, a in enumerate(np.ix_(*arrays)):\n arr[...,i] = a\n return arr.reshape(-1, la)\n\n all_combos = cartesian_product(\n kms.index.drop_duplicates().values,\n kms.DaysToEventOrCensoring.drop_duplicates().sort_values().values\n )\n\n all_combos = pd.DataFrame(all_combos).rename({0:\"Zipcode_5\", 1:\"DaysToEventOrCensoring\"}, axis=1)\n all_combos.set_index([\"Zipcode_5\", \"DaysToEventOrCensoring\"], inplace=True)\n print(\"Cartesian product done\")\n\n all_combos_merged = all_combos.join(\n kms.reset_index(drop=False).set_index([\"Zipcode_5\", \"DaysToEventOrCensoring\"]), \n how='outer'\n )\n print(\"Join complete\")\n\n all_combos_merged[\"n_events\"].fillna(0, inplace=True)\n all_combos_merged[\"n_risk_filled\"] = all_combos_merged.groupby(\"Zipcode_5\").n_risk.fillna(method = 'ffill')\n all_combos_merged[\"n_risk_filled\"] = all_combos_merged.groupby(\"Zipcode_5\").n_risk_filled.transform(lambda x: x.fillna(x.max()))\n\n wide = all_combos_merged[[\"n_risk_filled\", \"n_events\"]].reset_index().pivot(index = \"DaysToEventOrCensoring\", columns=\"Zipcode_5\")\n print(\"Wide data frame ready\")\n \n '''\n Compute pairwise distances for all zip code KM curves.\n '''\n def get_one_zip_row(i):\n o1 = wide.loc[:, ('n_events', all_zips[i])].values.reshape(-1, 1)\n n1 = wide.loc[:, ('n_risk_filled', all_zips[i])].values.reshape(-1, 1)\n\n oall = 
wide.loc[:, 'n_events'].values\n nall = wide.loc[:, 'n_risk_filled'].values\n\n ovn = (o1 + oall)/(n1 + nall)\n\n e1 = ovn.T.dot(n1)\n e2 = (ovn * nall).sum(axis=0).reshape(-1,1)\n\n return(\n ((o1.sum() - e1)**2/e1 + (oall.sum(axis=0).reshape(-1, 1) - e2)**2/e2).reshape(-1, )\n )\n\n all_zips = kms.index.drop_duplicates().values\n distances = np.zeros((len(all_zips), len(all_zips)))\n\n for i in range(len(all_zips)):\n distances[:, i] = distances[i, :] = get_one_zip_row(i)\n\n if i % 20 == 0:\n print(i)\n print(time.time() - t0)\n t0 = time.time()\n if i % 500 == 0:\n print(\"SAVED at i = \")\n np.save(save_path, distances)\n np.save(save_path, distances)\n\ndef ClusterZipCodes(train, clusterix, distances_path = None, save_path = None):\n\n kms = train[[\"Zipcode_5\", \"DaysToEventOrCensoring\", \"AnyOutcome\"]].copy()\n n_events = kms.groupby([\"Zipcode_5\", \"DaysToEventOrCensoring\"]\n ).AnyOutcome.sum().reset_index().rename({\"AnyOutcome\":\"n_events\"}, axis=1)\n \n kms[\"n_risk\"] = kms.groupby(\"Zipcode_5\").DaysToEventOrCensoring.rank(ascending=False, method=\"first\")\n kms = kms.drop(\"AnyOutcome\", axis=1).sort_values(by = \"n_risk\").groupby([\"Zipcode_5\", \"DaysToEventOrCensoring\"]\n ).last().reset_index()\n kms = kms.merge(n_events)\n \n kms[\"ri\"] = 1 - kms.n_events/kms.n_risk\n kms[\"vst\"] = kms.n_events/((kms.n_risk - kms.n_events)*kms.n_risk)\n\n kms[\"St\"] = kms.sort_values(by = \"DaysToEventOrCensoring\", ascending = True).groupby(\"Zipcode_5\").ri.cumprod()\n kms[\"StVar\"] = kms.sort_values(by = \"DaysToEventOrCensoring\", ascending = True).groupby(\"Zipcode_5\").vst.cumsum()\n kms[\"StVar\"] = kms.St**2 * kms.StVar\n \n kms.set_index(\"Zipcode_5\", inplace=True)\n all_zips = kms.index.drop_duplicates().values\n \n distances = ComputeZipCodeDistances(kms, distances = distances_path, save_path = save_path)\n \n clusts, meds = cluster(distances, clusterix)\n \n return({\n \"Clusters\" : clusts,\n \"Medians\" : meds,\n \"KMs\" : kms,\n \"AllZips\" : all_zips\n })\n\n " }, { "alpha_fraction": 0.5979424118995667, "alphanum_fraction": 0.624691367149353, "avg_line_length": 35.83333206176758, "blob_id": "81799ad7c8bc4de2474cd8a044ad0d93279fe3dd", "content_id": "3dd2942bf8d2eb8a902383d86157f5ea31335a3f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2430, "license_type": "no_license", "max_line_length": 128, "num_lines": 66, "path": "/code/PrepareData.py", "repo_name": "erincr/CVDRisk", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\nfrom sklearn.model_selection import train_test_split\n\n\ndef PrepareData(datapath, zipdatapath):\n dt = pd.read_csv(datapath)\n dtzip = pd.read_csv(zipdatapath)\n\n dt[\"Censor_Date_Datetime\"] = pd.to_datetime(dt.Censor_Date)\n dt[\"Index_Date_Datetime\"] = pd.to_datetime(dt.Index_Date)\n dt[\"Index_Date_Plus_10_Years\"] = pd.to_datetime(dt.Index_Date_Plus_10_Years)\n dt[\"Outcome_Date_Datetime\"] = pd.to_datetime(dt.Outcome_Date)\n dt[\"DaysToCensoring\"] = dt.Censor_Date_Datetime - dt.Index_Date_Datetime\n\n # This should be ~8%\n print(\"Prevalence: \", dt.AnyOutcome.sum()/len(dt))\n\n # ~14,000\n print(\"Number of ZIP codes:\", len(dt.Zipcode_5.drop_duplicates()))\n\n zips = dt.Zipcode_5.drop_duplicates()\n clean_zips = pd.DataFrame(\n {\"unique_zip\": [x[0] if len(x) > 0 else \"\" for x in [[y for y in x.split(\"_\") if y in dtzip.Zip.values] for x in zips]],\n \"grouped_zip\": zips\n }\n )\n fixedzips = dtzip.merge(clean_zips, left_on = \"Zip\", 
right_on = \"unique_zip\")\n\n dt = dt.merge(fixedzips, left_on = \"Zipcode_5\", right_on = \"grouped_zip\")\n dt.drop(\"unique_zip\", axis=1, inplace=True) \n\n dt[\"DaysToCensoring\"] = dt.Censor_Date_Datetime - dt.Index_Date_Datetime\n dt[\"WeeksToCensoring\"] = np.ceil(dt.DaysToCensoring.dt.days/7)\n\n dt[\"Censored\"] = 1\n dt.loc[(dt.AnyOutcome == 0) & (dt.DaysToCensoring.dt.days >= 10 * 365.25), \"Censored\"] = 0\n dt.loc[dt.AnyOutcome == 1, \"Censored\"] = 0\n\n dt['Gdr_M'] = 0\n dt.loc[dt.Gdr_Cd == 'M', 'Gdr_M'] = 1\n\n dtOutcome = dt.loc[dt.AnyOutcome == 1]\n dtNoOutcome = dt.loc[dt.AnyOutcome == 0]\n\n train1, test1 = train_test_split(dtOutcome, test_size=0.2, random_state=42)\n val1, test1 = train_test_split(test1, test_size=0.5, random_state=42)\n\n train0, test0 = train_test_split(dtNoOutcome, test_size=0.2, random_state=42)\n val0, test0 = train_test_split(test0, test_size=0.5, random_state=42)\n\n train = pd.concat([train1, train0]).reset_index()\n test = pd.concat([test1, test0]).reset_index()\n val = pd.concat([val1, val0]).reset_index()\n\n print(len(train) + len(test) + len(val) == len(dt))\n print(train.AnyOutcome.sum()/len(train))\n print(test.AnyOutcome.sum()/len(test))\n print(val.AnyOutcome.sum()/len(val))\n \n return({\n \"Train\": train,\n \"Test\" : test,\n \"Val\" : val,\n \"Zips\" : fixedzips\n })" }, { "alpha_fraction": 0.4626474380493164, "alphanum_fraction": 0.4914810061454773, "avg_line_length": 29.520000457763672, "blob_id": "7fe67c5108f776a545a8a4bc100040f99765ae31", "content_id": "57c3af06746f507158dfc4d4e711baf8df128cf8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 763, "license_type": "no_license", "max_line_length": 65, "num_lines": 25, "path": "/code/PerformanceMetrics.py", "repo_name": "erincr/CVDRisk", "src_encoding": "UTF-8", "text": "import numpy as np\n\n\ndef unoC(t, p, o, w):\n tt = t[t == 1]\n pt = p[t == 1]\n ot = o[t == 1]\n wt = w[t == 1]\n\n def chunked_cindex(chunk, bufsize = len(ot)):\n for start in range(0, bufsize + chunk, chunk):\n s, e = start, min(start + chunk, bufsize)\n print(s)\n oc_bool_matrix = o > ot[s:e].reshape(-1,1)\n pr_bool_matrix = p < pt[s:e].reshape(-1,1)\n both = np.logical_and(oc_bool_matrix, pr_bool_matrix)\n\n n = np.sum(both, axis=1).dot(1/wt[s:e]**2)\n d = np.sum(pr_bool_matrix, axis=1).dot(1/wt[s:e]**2)\n yield n, d\n\n info = [(n,d) for n,d in chunked_cindex(10000)]\n c = sum([x[0] for x in info])/sum([x[1] for x in info])\n print(c)\n return(c)\n" } ]
6
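The `PerformanceMetrics.py` and `HyperparameterSearch.py` entries in the record above compute an IPCW-weighted concordance (Uno's C) by counting, for each subject with an observed event, how many longer-surviving subjects were assigned a lower predicted risk. A stripped-down, unweighted version of that pairwise count on synthetic data is shown below; it is purely illustrative, and the repository's version additionally weights each event subject by 1/w², where w is the censoring weight.

```python
import numpy as np

# Toy concordance check on synthetic survival data -- not taken from the repo.
rng = np.random.default_rng(0)
n = 200
time_observed = rng.exponential(5.0, size=n)          # follow-up time, arbitrary units
event = rng.integers(0, 2, size=n)                    # 1 = outcome observed, 0 = censored
risk = -time_observed + rng.normal(0.0, 1.0, size=n)  # higher score should mean shorter survival

num = den = 0
for i in np.where(event == 1)[0]:
    longer = time_observed > time_observed[i]          # subjects who outlived subject i
    den += longer.sum()
    num += np.sum(longer & (risk[i] > risk))           # ...and were ranked as lower risk
print("concordance: %.3f" % (num / den))
```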
phelpsw/spacejunk.py
https://github.com/phelpsw/spacejunk.py
ac37c5f9e851e0cee1765f3c0c52e5678674de01
335e989999a06f40f0e877209fea48b722a96182
4d00fc4e693fdd89797ca9c1d81558456933a852
refs/heads/master
2016-09-06T19:01:19.506374
2014-05-14T19:10:27
2014-05-14T19:10:27
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7288494110107422, "alphanum_fraction": 0.750423014163971, "avg_line_length": 30.49333381652832, "blob_id": "e5fcc16b21b0402583b0a2b19eac23dc523f9b4f", "content_id": "8e6561731e1c3624a429e90e8c779550e225c9e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2364, "license_type": "no_license", "max_line_length": 277, "num_lines": 75, "path": "/README.md", "repo_name": "phelpsw/spacejunk.py", "src_encoding": "UTF-8", "text": "# Mapping Engine\n\n## Generate GeoJSON base map\n\nBased on the sample here http://bost.ocks.org/mike/map/\n\nAnd this really excellent site: http://www.naturalearthdata.com/downloads/50m-physical-vectors/\n\nThis only needs to occur once to render the map against which all else will be rendered.\n\n```\nsudo apt-get install gdal-bin nodejs npm nodejs-legacy\nsudo npm install -g topojson\n```\n\n* http://www.naturalearthdata.com/downloads/50m-physical-vectors/\n* ```wget http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/50m/physical/ne_50m_coastline.zip````\n\n### Build Map\n```\ngit clone https://github.com/mbostock/world-atlas.git\ncd world-atlas\nnpm install\nmake topo/world-50m.json\n```\n\nAlternative mapping library: http://matplotlib.org/basemap/users/examples.html\n\n\n# Association Server\n\nRuns periodically, gathers list of all norad ids. For each norad id, looks up associated objects. Updates the associated objects list for a user (with the exception of listed objects)\n\n\n# TLE Update server\n\n## TLE Update Thread\n\nRuns once every day.\n\nResponsible for building a list of norad ids to update. It then makes the necessary requests to space-track.org and stores the resulting TLE in a map. Because the entire space-track catalog (50k sats * 200B per listing) can fit into about 10MB this doesn't seem unreasonable.\n\nThis uses config.py for space-track.org login credentials. Once this file has been updated with creds, use ```git update-index --assume-unchanged config.py``` so git will ignore changes on this file.\n\n\n## TLE Propagation Thread\nFor each norad id, update the current position and update mongodb.\n\n## Setup\n```\nsudo apt-get install mongod\nsudo pip install pymongo\nsudo pip install pyephem\n\nmongo\nuse satdb\nuser = {name : \"phelps\", objects : [1, 2, 3]}\ndb.users.insert(user)\nuser = {name : \"bob\", objects : [2, 3, 4]}\ndb.users.insert(user)\n\nsat = {norad: 1, latitude: 10.0, longitude: -10.0}\ndb.satellites.insert(sat)\nsat = {norad: 2, latitude: 20.0, longitude: -20.0}\ndb.satellites.insert(sat)\nsat = {norad: 3, latitude: 30.0, longitude: -30.0}\ndb.satellites.insert(sat)\nsat = {norad: 4, latitude: 40.0, longitude: -40.0}\ndb.satellites.insert(sat)\n\ndb.users.distinct('objects')\n```\n# Mapping server\n\nLooks up satellites associated with given userid. 
Takes the positions of each satellite and returns last calculated location.\n\n\n" }, { "alpha_fraction": 0.5829244256019592, "alphanum_fraction": 0.6270853877067566, "avg_line_length": 31.838708877563477, "blob_id": "4ae337f07999b2d4d922d884b5587b320a4c07dd", "content_id": "0ac6bfbd0f291a2f0088d69bb895bec789868272", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1019, "license_type": "no_license", "max_line_length": 80, "num_lines": 31, "path": "/tle_propagator.py", "repo_name": "phelpsw/spacejunk.py", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nfrom pymongo import MongoClient\nimport ephem\nfrom datetime import datetime\nfrom math import pi\n\nstart = datetime.now()\nclient = MongoClient('localhost', 27017)\ndb = client.satdb\n\nsats = db.satellites.find({'status' : 'alive'})\nrad_to_deg = 180.0 / pi\nfor sat in sats:\n target = ephem.readtle(str(sat['name']), str(sat['tle1']), str(sat['tle2']))\n target.compute(datetime.utcnow())\n\n # Quick and dirty way to calc apogee and perigee from Ted Molczan\n # http://www.satobs.org/seesat/Dec-2002/0197.html\n a = (8681663.653 / target._n) ** (2.0/3.0)\n perigee = a * (1 - target._e) - 6371.0\n apogee = a * (1 + target._e) - 6371.0\n\n db.satellites.update({'norad': sat['norad']},\n\t {'$set' : {'lat' : target.sublat * rad_to_deg,\n\t\t 'lon' : target.sublong * rad_to_deg,\n\t\t 'perigee' : perigee,\n\t\t 'apogee' : apogee}})\nruntime = datetime.now() - start\nprint \"tle_propagator.py %d satellites, runtime: %s, %s/sat\" % \\\n\t(sats.count(), runtime, runtime / sats.count())\n\n" }, { "alpha_fraction": 0.6838390827178955, "alphanum_fraction": 0.6958362460136414, "avg_line_length": 25.716981887817383, "blob_id": "9ec77df49b2d2c17586c86d3ae7a610d130b288e", "content_id": "528979fa3d2a36586654f3e776abecbe2ddde6ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1417, "license_type": "no_license", "max_line_length": 162, "num_lines": 53, "path": "/tle_updater.py", "repo_name": "phelpsw/spacejunk.py", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nfrom pymongo import MongoClient\nimport urllib\nimport urllib2\nimport cookielib\nimport json\nimport sys\n\n\n\nclient = MongoClient('localhost', 27017)\ndb = client.satdb\n\nnorad_ids = db.users.distinct('objects')\n\n# TODO: there is probably a better way to do this\nfrom config import username, password\n\ncookieJar = cookielib.CookieJar()\nopener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookieJar))\nlogin_uri = 'https://www.space-track.org/ajaxauth/login'\nvalues = {'identity' : username,\n 'password' : password}\ndata = urllib.urlencode(values)\nopener.open(login_uri, data)\n\n# TODO: check if we successfully logged in\n\nfor norad_id in norad_ids:\n tle_uri = \"https://www.space-track.org/basicspacedata/query/class/tle_latest/NORAD_CAT_ID/\"+str(int(norad_id))+\"/orderby/ORDINAL%20asc/limit/1/metadata/false\"\n try:\n\tresp = opener.open(tle_uri)\n\ttle = json.loads(resp.read())[0]\n\tprint tle\n\n\t# TODO: check if object has decayed!\n\n\tdb.satellites.update({'norad': int(norad_id)},\n\t\t{'norad': int(norad_id),\n\t\t 'name': str(tle['OBJECT_NAME']),\n\t\t 'tle1': str(tle['TLE_LINE1']),\n\t\t 'tle2': str(tle['TLE_LINE2']),\n\t\t 'status' : 'alive'},\n\t\tupsert=True)\n except IndexError:\n\t# TODO: consider status == invalid\n\tdb.satellites.update({'norad': int(norad_id)},\n\t\t{'norad': int(norad_id), 'status': 'dead'},\n\t\tupsert=True)\n 
except:\n\tprint \"Unexpected error:\", sys.exc_info()[0]\n\tpass\n\n" } ]
3
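The README in the record above describes a "Mapping server" that looks up the satellites associated with a given user and returns their last computed positions, but no such script ships alongside `tle_propagator.py` and `tle_updater.py`. A minimal sketch of that lookup, reusing the `users` and `satellites` collections created in the README's setup snippet and the `lat`/`lon` fields written by `tle_propagator.py`, might look like this (the function name and the example user are assumptions for illustration only):

```python
# Hypothetical mapping-server query -- this file does not exist in the repository.
from pymongo import MongoClient

def positions_for_user(name):
    db = MongoClient('localhost', 27017).satdb
    user = db.users.find_one({'name': name})
    if user is None:
        return []
    return [{'norad': sat['norad'],
             'lat': sat.get('lat'),
             'lon': sat.get('lon')}
            for sat in db.satellites.find({'norad': {'$in': user['objects']}})]

print(positions_for_user('phelps'))
```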
SmituSSupp/AsYetUntitled
https://github.com/SmituSSupp/AsYetUntitled
bc64bd97e9e19c6d673d4c1dec71cea6eb7c5ac1
10cf8a315fbe6dabd7d1356addb20fa4e5eef854
c04b24ab35e847a0ad2b35cfb326e1430b118122
refs/heads/master
2023-07-27T00:52:02.975942
2021-09-11T11:44:49
2021-09-11T11:44:49
397,374,716
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5054884552955627, "alphanum_fraction": 0.702524721622467, "avg_line_length": 16.689319610595703, "blob_id": "619bbe4afb81b5f6b7acc4d14fd10abc7f977599", "content_id": "92de64bc623d3fbb69fbfa9f1b627ec8954dd56f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1822, "license_type": "no_license", "max_line_length": 78, "num_lines": 103, "path": "/requirements.txt", "repo_name": "SmituSSupp/AsYetUntitled", "src_encoding": "UTF-8", "text": "appdirs==1.4.4\nargon2-cffi==20.1.0\nasync-generator==1.10\nattrs==21.2.0\nbackcall==0.2.0\nbeautifulsoup4==4.9.3\nbleach==4.0.0\nbs4==0.0.1\ncertifi==2021.5.30\ncffi==1.14.6\ncharset-normalizer==2.0.4\ncolorama==0.4.4\ncssselect==1.1.0\ncycler==0.10.0\ndebugpy==1.4.1\ndecorator==5.0.9\ndefusedxml==0.7.1\nentrypoints==0.3\net-xmlfile==1.1.0\nfake-useragent==0.1.11\nfeedparser==6.0.8\nfindiff==0.8.9\nidna==3.2\nimageio==2.9.0\nimportlib-metadata==2.1.1\nipykernel==6.0.3\nipython==7.26.0\nipython-genutils==0.2.0\njedi==0.18.0\nJinja2==3.0.1\njsonschema==3.2.0\njupyter-client==6.1.12\njupyter-core==4.7.1\njupyterlab-pygments==0.1.2\nkiwisolver==1.3.1\nlxml==4.6.3\nMarkupSafe==2.0.1\nmatplotlib==3.4.2\nmatplotlib-inline==0.1.2\nmistune==0.8.4\nmplfinance==0.12.7a17\nmpmath==1.2.1\nmultitasking==0.0.9\nnbclient==0.5.3\nnbconvert==6.1.0\nnbformat==5.1.3\nnest-asyncio==1.5.1\nnetworkx==2.6.2\nnotebook==6.4.2\nnumpy==1.21.1\nopenpyxl==3.0.7\npackaging==21.0\npandas==1.3.1\npandocfilters==1.4.3\nparse==1.19.0\nparso==0.8.2\npickleshare==0.7.5\nPillow==8.3.1\nplotly==5.1.0\nprometheus-client==0.11.0\nprompt-toolkit==3.0.19\npycparser==2.20\npyee==8.1.0\nPygments==2.9.0\npyparsing==2.4.7\npyppeteer==0.2.5\npyquery==1.4.3\npyrsistent==0.18.0\npython-dateutil==2.8.2\npytz==2021.1\nPyWavelets==1.1.1\npywin32==301\npywinpty==1.1.3\npyzmq==22.2.1\nrequests==2.26.0\nrequests-html==0.10.0\nscikit-image==0.18.2\nscipy==1.7.1\nSend2Trash==1.7.1\nsgmllib3k==1.0.0\nsix==1.16.0\nsoupsieve==2.2.1\nsympy==1.8\nTA-Lib @ file:///C:/Users/Max/Downloads/TA_Lib-0.4.21-cp37-cp37m-win_amd64.whl\ntenacity==8.0.1\nterminado==0.10.1\ntestpath==0.5.0\ntifffile==2021.8.8\ntornado==6.1\ntqdm==4.62.0\ntraitlets==5.0.5\ntrendln==0.1.10\ntyping-extensions==3.10.0.0\nurllib3==1.26.6\nw3lib==1.22.0\nwcwidth==0.2.5\nwebencodings==0.5.1\nwebsockets==8.1\nwincertstore==0.2\nxlrd==2.0.1\nyahoo-fin==0.8.9.1\nyfinance==0.1.63\nzipp==3.5.0\n" }, { "alpha_fraction": 0.6276803016662598, "alphanum_fraction": 0.6842105388641357, "avg_line_length": 33.266666412353516, "blob_id": "2db463cbf19dd3172c73b52900538ac7ca66ec43", "content_id": "43fb6b91e4f7d8067e8001162ae2869745137c15", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 513, "license_type": "no_license", "max_line_length": 95, "num_lines": 15, "path": "/collect_moex_info.py", "repo_name": "SmituSSupp/AsYetUntitled", "src_encoding": "UTF-8", "text": "import pandas as pd\n\nfile_loc = \"fixed_sheet.xlsx\"\nstart_date = \"2021-06-18\"\nend_date = \"2021-08-18\"\n\n\ndf = pd.ExcelFile(file_loc).parse('18.06.2021') #you could add index_col=0 if there's an index\nmoex_tickers = df[:100]['Code']\ncompanies_and_prices = {}\n\nfor ticker in moex_tickers:\n company_prices = yf.download(ticker + '.ME', start=start_date, end=end_date, interval='1d')\n company_info = yf.Ticker(ticker + '.ME').info\n companies_and_prices[ticker] = {'prices':company_prices, 'info': company_info}" } ]
2
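`collect_moex_info.py` in the record above iterates over ticker codes from a spreadsheet and calls `yf.download` / `yf.Ticker` without ever importing `yf`; the obvious candidate is the `yfinance` package pinned in the accompanying `requirements.txt`. A self-contained restatement of that download loop, with the missing import and stand-in tickers and dates in place of the spreadsheet values, would be:

```python
# Minimal, runnable restatement of the loop in collect_moex_info.py.
# The ticker list and date range are stand-ins for the spreadsheet values.
import yfinance as yf

tickers = ["SBER", "GAZP"]
start_date, end_date = "2021-06-18", "2021-08-18"

companies_and_prices = {}
for ticker in tickers:
    prices = yf.download(ticker + ".ME", start=start_date, end=end_date, interval="1d")
    info = yf.Ticker(ticker + ".ME").info
    companies_and_prices[ticker] = {"prices": prices, "info": info}

print({t: len(v["prices"]) for t, v in companies_and_prices.items()})
```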
NightKev/suikabot-py
https://github.com/NightKev/suikabot-py
0d4a2868928f96019b943ec1c01aa8f5039ae3c7
df05fa5261f71e6fc65ce7667ae593a73e65f18d
8572ce50ccd0d61e874a89d4f4212a9b4be85aaf
refs/heads/master
2021-01-22T14:05:26.518467
2016-08-27T10:03:29
2016-08-28T03:36:56
66,237,145
0
0
null
2016-08-22T03:36:05
2016-08-22T03:36:03
2016-08-15T05:55:13
null
[ { "alpha_fraction": 0.5733333230018616, "alphanum_fraction": 0.5783907771110535, "avg_line_length": 25.851852416992188, "blob_id": "4c2ac1ed045abc78320f2336d1f8f0dbea9a6913", "content_id": "ac5ab0c60efe8ad8bcfdd9119056fb7074c2fc17", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2175, "license_type": "no_license", "max_line_length": 145, "num_lines": 81, "path": "/plugins/suika.py", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "import sys\nimport re\n\n# parsing dictionaries\ncommand_verbs = [\n 'give',\n 'bullying',\n]\n\ncommand_tverb = [\n 'is'\n]\n\ncommand_prep = [\n 'to'\n]\n\ncommand_desc = [\n 'glass',\n 'cup'\n]\n\ncommand_nouns = [\n 'sake',\n 'tea'\n]\n\n\ndef match_command (line):\n '''Return if a line is a command and the line without the command identifier.'''\n match_kw = 'suika|suika_ibuki|suikaibuki|suika1buki|suika_1buki'\n match_punct = ',.?!'\n match_re = '^({0}[{1}]*)|({0}[{1}]*)$'.format(match_kw, match_punct)\n\n return (re.match(match_re, line) != None,\n re.sub(match_re, '', line))\n\ndef parse_command (line):\n '''Parse a natural language line into a command dictionary'''\n words = re.split('[,\\s]+', line)\n\n command = {}\n\n # \"\"language\"\" processing\n pword = ''\n for word in words:\n if word in command_verbs:\n command['verb'] = word\n elif word in command_nouns:\n command['dobject'] = word\n if pword not in (command_verbs + command_desc):\n command['iobject'] = pword\n elif pword in command_prep:\n command['iobject'] = word\n elif word in command_tverb:\n command['iobject'] = pword\n\n pword = word\n \n return command\n\ndef irc_public (client, user, channel, message):\n if client.access_list.check(user, 0):\n is_command, line = match_command(message)\n\n if is_command:\n command = parse_command(line)\n \n # run the sub-command\n mod = sys.modules[__name__]\n cmd_func = 'cmd_{0}'.format(command['verb'])\n if (hasattr(mod, cmd_func)):\n getattr(mod, cmd_func)(client, user, channel, command)\n\n# sub-commands\ndef cmd_give (client, user, channel, command):\n if command['iobject'] == 'me': command['iobject'], _ = user.split('!')\n client.describe(channel, 'gives {0} a nice warm cup of {1}'.format(command['iobject'], command['dobject']))\n\ndef cmd_bullying (client, user, channel, command):\n client.say(channel, '{0}: I feel offended by your recent action(s). 
Please read http://stop-irc-bullying.eu/stop'.format(command['iobject']))\n" }, { "alpha_fraction": 0.4739725887775421, "alphanum_fraction": 0.48812785744667053, "avg_line_length": 41.94117736816406, "blob_id": "fe6fe98a22cb91bcf3e74a36e230e4ae568a60a4", "content_id": "e7b16009f067a9304bb58a0d90e22edbfd3deeb3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2190, "license_type": "no_license", "max_line_length": 129, "num_lines": 51, "path": "/modules/filters.py", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "import string\nimport random\nimport datetime\n\nclass TrollOutputFilter:\n def __init__(self): \n self.currentTroll = random.randint(0, 10)\n self.trollCounter = 0\n self.trolls = [\n #aradia\n { 'prefix': '', 'replace': lambda x: x.translate(string.maketrans('oo', '00')) },\n #terezi\n { 'prefix': '', 'replace': lambda x: x.upper().translate(string.maketrans('AIE', '413')) },\n #tavros\n { 'prefix': '', 'replace': lambda x: x.title().swapcase() },\n #sollux\n { 'prefix': '', 'replace': lambda x: x.replace('s', '2').replace('S', '2').replace('i', 'ii').replace('I', 'II') },\n #karkat\n { 'prefix': '', 'replace': lambda x: x.upper() },\n #nepeta\n { 'prefix': ':33 <', 'replace': lambda x: x.replace('ee', '33').replace('EE', '33') },\n #kanaya\n { 'prefix': '', 'replace': lambda x: x.capitalize() },\n #vriska\n { 'prefix': '', 'replace': lambda x: x.translate(string.maketrans('bB', '88')).replace('ate', '8') },\n #equius\n { 'prefix': 'D -->', 'replace': lambda x: x.translate(string.maketrans('xX', '%%')) },\n #gamzee TODO need a full func\n #eridan\n { 'prefix': '', 'replace': lambda x: x.replace('w', 'ww').replace('v', 'vv').replace('W', 'WW').replace('V', 'VV') },\n #feferi\n { 'prefix': '', 'replace': lambda x: x.replace('h', ')(').replace('H', ')(').replace('E', '-E') },\n ]\n\n def transform(self, message):\n d = datetime.date.today()\n if (d.month != 4 or d.day != 13):\n return message\n\n self.trollCounter += 1\n if (self.trollCounter > random.randint(6, 12)):\n self.currentTroll = random.randint(0, 10)\n self.trollCounter = 0\n \n def trollUnlessURL(x):\n if not x.startswith(('http://', 'https://', 'ftp://')):\n return self.trolls[self.currentTroll]['replace'](x)\n \n return x\n \n return self.trolls[self.currentTroll]['prefix'] + ' '.join(map(trollUnlessURL, message.split()))\n" }, { "alpha_fraction": 0.5993704795837402, "alphanum_fraction": 0.605215847492218, "avg_line_length": 27.87013053894043, "blob_id": "f8145fc3b7649bd801034d145dbec48ee9e764d5", "content_id": "249c611593c0282e94cb97d4b2c08d0671dbf1c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2224, "license_type": "no_license", "max_line_length": 120, "num_lines": 77, "path": "/modules/util.py", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "import os\nimport re\nimport sys\n\nimport random\nimport appdirs\nimport errno\nimport yaml\nimport logging\n\nfrom twisted.words.protocols.irc import stripFormatting\nfrom twisted.words.protocols.irc import assembleFormattedText\nfrom twisted.words.protocols.irc import attributes as ircFormatting\n\nlogging.basicConfig(format=\"[%(levelname)s] [%(asctime)s] %(message)s\", datefmt=\"%Y-%m-%d %H:%M:%S\")\n\nlogger = logging.getLogger(\"suikabot\")\nlogger.setLevel(logging.INFO)\n \ndef mkdir(dirname):\n try:\n os.makedirs(dirname)\n except OSError as e:\n if e.errno != errno.EEXIST and not 
os.path.isdir(e.filename):\n raise\n\ndef ircmask_match (pattern, mask):\n '''Match an irc-style mask against a wildcard pattern.'''\n pattern = re.escape(pattern).replace('\\\\*', '.*')\n return re.match(pattern, mask) != None\n\ndef ircmask_split (hostmask):\n nick, userhost = hostmask.split('!', 1)\n user, host = userhost.split('@', 1)\n\n return (nick, user, host)\n\nclass Config:\n def __init__ (self, config_dir):\n self.config_dir = appdirs.user_config_dir(config_dir)\n mkdir(config_dir)\n\n def format_config_name (self, config):\n return os.path.join(self.config_dir, '{0}.conf'.format(config))\n \n\n def load (self, config):\n fname = self.format_config_name(config)\n \n try:\n with open(fname, 'r') as f:\n return yaml.load(f)\n except IOError as e:\n logger.error(\"Couldn't read config file {0}! {1}\".format(fname, e))\n \n return {}\n\n def save (self, config, data):\n fname = self.format_config_name(config)\n \n try:\n with open(fname, 'w+') as f:\n return yaml.dump(data, f)\n except IOError as e:\n logger.error(\"Couldn't write config file {0}! {1}\".format(fname, e))\n\nclass PhraseMap:\n def __init__ (self):\n self.phrases = {\n 'success': ['Success! {0}', 'Okay, {0}', '*hic* Sure, {0}', 'Yes. {0}', 'Absolutely! {0}', 'Of course, {0}']\n }\n\n def get (self, category):\n return random.choice(self.phrases[category]) \n\n def format (self, category, *args):\n return self.get(category).format(*args)\n\n" }, { "alpha_fraction": 0.6049545407295227, "alphanum_fraction": 0.60782790184021, "avg_line_length": 30.63882064819336, "blob_id": "7bb403ad2d28c02ee33e32f033adec49a9537a21", "content_id": "3ae09dcafb75d3a65be25efd2336961ca2015aea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12877, "license_type": "no_license", "max_line_length": 133, "num_lines": 407, "path": "/suikabot.py", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python2\n\nimport os\nimport sys\nimport imp\n\nimport re\nimport json\nimport errno\nimport traceback\n\nimport threading\nimport pickle\n\nimport appdirs\nimport ssl\n\nimport warnings\n\nfrom modules import util, filters\n\nfrom twisted.words.protocols import irc\nfrom twisted.internet import reactor, protocol\nfrom twisted.internet.ssl import ClientContextFactory as SSLClientContextFactory\nfrom twisted.internet.protocol import ReconnectingClientFactory\nfrom twisted.internet.endpoints import TCP4ClientEndpoint, SSL4ClientEndpoint, connectProtocol\nfrom Queue import Queue\n\nclass DataWriter:\n '''Threaded pickle data writing subsystem. 
Assumes small, infrequent writes'''\n def __init__ (self, data_dir='.'):\n self.data_dir = data_dir\n self.thread = threading.Thread(target=self.run)\n self.queue = Queue()\n\n self.thread.daemon = True\n self.thread.start()\n\n def add (self, fname, data):\n '''Queue the given data to be written to a file'''\n self.queue.put((fname, data))\n\n def get(self, fname):\n '''Get the data from the given file'''\n try:\n with open(os.path.join(self.data_dir, fname), 'rb') as f:\n data = pickle.load(f)\n return data\n except IOError:\n util.logger.warning(\"Tried to load nonexistent data file {0}\".format(fname))\n except EOFError:\n util.logger.warning(\"Loaded empty database file for {0}\".format(fname))\n\n return []\n\n def run (self):\n while True:\n util.mkdir(self.data_dir)\n\n fname, data = self.queue.get()\n with open(os.path.join(self.data_dir, fname), 'wb') as f:\n pickle.dump(data, f) \n\nclass AliasMap:\n def __init__ (self):\n self.aliases = []\n\n def find_alias_indices (self, alias_in, alias):\n in_idx = None\n needle_idx = None\n\n for idx, group in enumerate(self.aliases):\n if alias_in.lower() in group:\n in_idx = idx\n if alias.lower() in group:\n needle_idx = idx\n\n # got what we came here for\n if in_idx != None and needle_idx != None:\n return (in_idx, needle_idx)\n\n return (in_idx, needle_idx)\n \n def is_alias_of (self, alias_in, alias):\n in_idx, needle_idx = self.find_alias_indices(alias_in, alias)\n return in_idx != None and needle_idx != None and in_idx == needle_idx\n\n def get_aliases (self, alias):\n in_idx, needle_idx = self.find_alias_indices('', alias)\n if needle_idx != None:\n return self.aliases[needle_idx]\n\n return [alias]\n\n def add (self, alias_in, alias):\n if alias_in.strip() == '' or alias.strip() == '':\n\t return False\n\n in_idx, needle_idx = self.find_alias_indices(alias_in, alias)\n \n # new alias already exists\n if needle_idx != None:\n return False\n\n # the parent alias isn't there, make a new group\n if in_idx == None:\n self.aliases.append([alias_in.lower()])\n in_idx = len(self.aliases) - 1\n\n self.aliases[in_idx].append(alias.lower())\n return True\n\n def remove (self, alias):\n # can optimize this by just calling remove() on every group\n in_idx, needle_idx = self.find_alias_indices('', alias)\n if needle_idx != None:\n self.aliases[needle_idx].remove(alias)\n return True\n\n return False\n\nclass AccessList:\n LEVEL_OWNER = 100\n LEVEL_OP = 10\n\n def __init__ (self):\n self.access_map = {}\n\n def add (self, mask, level):\n self.access_map[mask] = level\n\n def delete (self, mask):\n if mask in self.access_map:\n del self.access_map[mask]\n\n def check (self, mask, level):\n '''Return if a given mask has at least the specified permissions.'''\n\n for p, l in self.access_map.viewitems():\n if util.ircmask_match(p, mask):\n return l >= level\n\n return False\n\n# FIXME: wtf is this shit\nclass Scheduler:\n def schedule (self, delay, callback, *args):\n reactor.callLater(delay, callback, *args)\n\nclass PluginLoader:\n def __init__ (self, plugin_dir='.'):\n self.plugins = {}\n self.plugin_dir = plugin_dir\n self.data_writer = None\n self.services = {}\n\n def load (self):\n plugin_files = os.listdir(self.plugin_dir)\n #suffixes = [x[0] for x in imp.get_suffixes()]\n suffixes = ['.py']\n\n errors = []\n\n for plugin_file in plugin_files:\n name, suffix = os.path.splitext(plugin_file)\n if suffix not in suffixes:\n continue\n try:\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\", RuntimeWarning)\n mod = 
imp.load_source('suikabot.plugin.{0}'.format(name), os.path.join(self.plugin_dir, plugin_file))\n self.plugins[name] = mod\n\n mod.data_writer = self.data_writer # FIXME: magic global variable is ugly\n mod.services = self.services\n \n # init if we can \n if hasattr(mod, 'init'):\n mod.init()\n else:\n util.logger.warning('No init defined for module {0}'.format(name))\n \n util.logger.info('Loaded module {0} from {1}'.format(name, self.plugin_dir))\n\n # general module loading problems\n except Exception as e:\n util.logger.error('Exception while loading module {0}! {1}'.format(plugin_file, e))\n #FIXME: this is wrong line number for whatever reason\n errors.append((plugin_file, type(e).__name__, sys.exc_info()[-1].tb_lineno))\n \n print errors\n return errors\n\n def reload (self):\n self.plugins = {}\n return self.load()\n\n def get (self):\n return self.plugins\n\nclass SuikaClient(irc.IRCClient):\n '''\n main bot class\n sends ALL events to loaded plugins (as raw_*)\n also sends Twisted's convenience events (as irc_*)\n '''\n\n def __init__ (self, server):\n self.server = server\n self.access_list = None\n self.alias_map = None\n self.plugins = None\n self.services = {};\n\n self.lineRate = 1\n\n def dispatch_to_plugins (self, handler, *args):\n for plugin in self.plugins.get().viewvalues():\n # call the handler\n if hasattr(plugin, handler):\n getattr(plugin, handler)(self, *args)\n\n def handleCommand (self, command, prefix, params):\n handler = 'raw_{0}'.format(command.lower())\n self.dispatch_to_plugins(handler, prefix, params)\n\n util.logger.debug(\"{0}: {1} ({2})\".format(command, prefix, params))\n\n irc.IRCClient.handleCommand(self, command, prefix, params)\n\n def schedule (self, delay, callback, *args):\n reactor.callLater(delay, callback, *args)\n\n def connectionMade(self):\n util.logger.info(\"Connected to server {0}.\".format(self.server))\n\n self.services['clients'][self.server] = self\n self.dispatch_to_plugins(\"client_connected\")\n\n irc.IRCClient.connectionMade(self)\n\n def say (self, channel, message, length=None):\n for s in self.services['outputFilters']:\n message = s.transform(message)\n\n irc.IRCClient.say(self, channel, message, length)\n\n # the rest of these are convenience methods inherited from Twisted\n # each is forwarded to plugins\n # some may have internal tracking logic\n # yes this is very silly\n\n # TODO: implement all of them\n def privmsg (self, *args):\n if args[1] == self.nickname:\n # rearrange query parameters, channel = sender instead of ourselves\n # TODO: add some way for plugins to easily filter priv/pub messages\n args = list(args)\n args[1], _, _ = util.ircmask_split(args[0])\n self.dispatch_to_plugins('irc_public', *args)\n else: \n self.dispatch_to_plugins('irc_public', *args)\n\n def noticed (self, *args):\n self.dispatch_to_plugins('irc_notice', *args)\n \n def action (self, *args):\n self.dispatch_to_plugins('irc_action', *args)\n \n def modeChanged (self, *args):\n self.dispatch_to_plugins('irc_mode', *args)\n\n def topicUpdated (self, *args):\n self.dispatch_to_plugins('irc_topic', *args)\n\n def userRenamed (self, *args):\n self.dispatch_to_plugins('irc_nick', *args)\n\n def nickChanged (self, *args):\n self.dispatch_to_plugins('irc_nickchange', *args)\n\n def joined (self, *args):\n self.dispatch_to_plugins('irc_joined', *args)\n\n def userJoined (self, *args):\n self.dispatch_to_plugins('irc_join', *args)\n\n def left (self, *args):\n self.dispatch_to_plugins('irc_left', *args)\n \n def userLeft (self, *args):\n 
self.dispatch_to_plugins('irc_leave', *args)\n \n def kickedFrom (self, *args):\n self.dispatch_to_plugins('irc_kicked', *args)\n \n def userKicked (self, *args):\n self.dispatch_to_plugins('irc_kick', *args)\n\n def userQuit (self, *args):\n self.dispatch_to_plugins('irc_quit', *args)\n\nclass SuikaClientFactory(ReconnectingClientFactory):\n def set_info(self, server, nickname='dumb_bot', username='', realname='', server_password=''):\n self.server = server\n self.userinfo = (nickname, username, realname)\n self.server_password = server_password\n\n def buildProtocol (self, addr):\n client = SuikaClient(self.server)\n client.nickname = self.userinfo[0]\n client.username = self.userinfo[1]\n client.realname = self.userinfo[2]\n client.password = self.server_password\n\n # FIXME: refactor this as a \"service\" kind of design\n client.access_list = self.access_list\n client.alias_map = self.alias_map\n client.plugins = self.plugins\n\n client.services = self.services\n\n # required(?) by the api\n client.factory = self\n \n return client\n\n def clientConnectionLost (self, connector, reason):\n util.logger.warning(\"Lost connection. ({0})\".format(reason.getErrorMessage()))\n ReconnectingClientFactory.clientConnectionLost(self, connector, reason)\n\n def clientConnectionFailed (self, connector, reason):\n util.logger.warning(\"Connection failed. ({0})\".format(reason.getErrorMessage()))\n ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)\n\ndef connect_client (server, address, port=6667, password=None, nickname='', username=None, realname=None, ssl=False, **kwargs):\n ''' Constructs and returns factory instance for a server after connecting it. '''\n factory = SuikaClientFactory()\n factory.set_info(server, nickname, username, realname, password)\n\n if ssl:\n reactor.connectSSL(address, port, factory, SSLClientContextFactory())\n else:\n reactor.connectTCP(address, port, factory)\n\n return factory\n\ndef main ():\n # client list (actually clientfactories)\n clients = {}\n\n # configuration files\n configuration = util.Config('suikabot')\n userinfo = configuration.load('userinfo')\n serverlist = configuration.load('servers')\n\n # services\n access_list = AccessList()\n access_list.access_map = configuration.load('accesslist')\n\n data_writer = DataWriter(appdirs.user_data_dir('suikabot'))\n\n alias_map = AliasMap()\n alias_map.aliases = data_writer.get('aliases.db') or []\n\n services = {}\n services['clients'] = {}\n services['scheduler'] = Scheduler()\n services['phrases'] = util.PhraseMap()\n services['outputFilters'] = [filters.TrollOutputFilter()]\n\n plugins = PluginLoader('plugins')\n plugins.data_writer = data_writer\n plugins.services = services\n plugins.load()\n\n # connection logic\n for server, opts in serverlist.viewitems():\n # clump the options together and let connect unpack w/ defaults\n opts.update(userinfo)\n factory = connect_client(server, **opts)\n\n # dependency inject\n factory.access_list = access_list\n factory.alias_map = alias_map\n factory.plugins = plugins\n factory.services = services\n\n clients[server] = factory\n\n # cleanup callback\n def shutdown ():\n util.logger.info(\"Shutting down...\")\n \n # save config files\n configuration.save('userinfo', userinfo)\n configuration.save('servers', serverlist)\n configuration.save('accesslist', access_list.access_map)\n data_writer.add('aliases.db', alias_map.aliases)\n \n reactor.addSystemEventTrigger('before', 'shutdown', shutdown)\n\n # main loop\n reactor.run()\n\nif __name__ == 
\"__main__\":\n main()\n" }, { "alpha_fraction": 0.702786386013031, "alphanum_fraction": 0.7120742797851562, "avg_line_length": 18, "blob_id": "68ecf006abe1bba029891cd531c52bfb61336c3b", "content_id": "15266a35aea3e4d575d9f8b66885a6257062630c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 323, "license_type": "no_license", "max_line_length": 49, "num_lines": 17, "path": "/README.md", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "# suikabot\ndrunk oni irc robot\n\n* python 2\n* plugin based\n* YAML configs\n* pickled data storage\n\n## Dependencies\n* appdirs\n* Twisted with SSL support\n * pyopenssl\n * pypiwin32 (on Windows)\n* PyYAML\n* humanize (for the `later` and `remind` plugins)\n* pytz (for the `ping` plugin)\n* parsedatetime (for the `remind` plugin)\n" }, { "alpha_fraction": 0.5567413568496704, "alphanum_fraction": 0.5602988004684448, "avg_line_length": 29.225807189941406, "blob_id": "5b2526cc827711d569ece92d49e21cefb9456737", "content_id": "8534fe8c284d079dbb9e3771b385c181564a54dc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2811, "license_type": "no_license", "max_line_length": 115, "num_lines": 93, "path": "/plugins/later.py", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "import time\nimport humanize\n\nfrom modules import util\nfrom collections import defaultdict\n\nclass Laters (defaultdict):\n def add (self, target, user, msg):\n self.get(target).append((user, msg, time.time()))\n \n def has (self, target):\n return target.lower() in self\n \n def get (self, target):\n return self[target.lower()]\n\n def remove (self, target):\n try:\n del self[target.lower()]\n except KeyError:\n pass\n\n def limitcheck (self, target, user):\n return len([l for l in self.get(target) if l[0].lower() == user.lower()]) < 6\n\n def load (self):\n self.clear()\n self.update(data_writer.get('laters.db'))\n\n def commit (self):\n data_writer.add('laters.db', dict(self))\n \nlaters = Laters(list)\n\ndef init ():\n laters.load()\n\ndef process_later (client, hostmask, channel, message):\n nick, user, host = util.ircmask_split(hostmask)\n\n target = nick.lower()\n\n # check for saved laters first\n aliases = client.alias_map.get_aliases(target)\n if (True in [laters.has(x) for x in aliases]):\n #lats = [l for al in [laters.get(y) for y in aliases] for l in al] # literal magic\n lats = []\n for y in aliases:\n lats += laters.get(y)\n\n for l in lats:\n sender, msg, t = l\n \n t = time.time() - t\n\n client.say(channel, \"{0}: Sent {1}: <{2}> {3}\".format(\n nick, humanize.naturaltime(t), sender, msg\n ))\n\n # clear out the passed messages\n for alias in aliases:\n laters.remove(alias)\n\n # save to disk\n laters.commit()\n \n # process commands\n if message.startswith('!later'):\n _, cmd, target, msg = message.split(' ', 3)\n \n if cmd in ['tell', 'remind']:\n t = target.lower()\n\n #if t in ['xpc', 'xpcybic', 'xpcynic', 'xpcyphone', 'xpcdroid']:\n # client.say(channel, \"Shhh!!! 
You know xpc doesn't like that!\")\n #else:\n if t == 'me':\n target = nick.lower()\n\n if laters.limitcheck(target, nick):\n laters.add(target, nick, msg)\n #client.say(channel, \"Okay, I'll remind {0} later!\".format(target))\n client.say(channel, services['phrases'].format('success', \"I'll remind {0} later!\".format(target)))\n laters.commit()\n else:\n client.say(channel, \"You already left {0} too many reminders!\".format(target))\n\ndef irc_public (client, hostmask, channel, message):\n process_later(client, hostmask, channel, message)\n\ndef irc_private (client, hostmask, channel, message):\n nick, user, host = util.ircmask_split(hostmask)\n process_later(client, hostmask, nick, message)\n" }, { "alpha_fraction": 0.6140127182006836, "alphanum_fraction": 0.6203821897506714, "avg_line_length": 38.20000076293945, "blob_id": "dcae3fc59ded0b9227936b3a66ac05d65c576958", "content_id": "b15ce95b02b5a069ad9c1cc52e6f6b7fed5981b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 785, "license_type": "no_license", "max_line_length": 198, "num_lines": 20, "path": "/plugins/ping.py", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "import random\nimport datetime\nimport pytz\n\nresponses = [\"pong\", \"butts\", \"bing\", \"bong\", \"dong\", \"wong\", \"gong\", \"ding\", \"wing\", \"ring\", \"that's gnu PLUS ping to you\", \"kentucky fried pingen\", \"burger ping\", \"ping of the hill\", \"xi jinping\"]\n\nbongzone = pytz.timezone('Europe/London')\n\ndef irc_public (client, user, channel, message):\n if message.strip('!#?.~') == \"ping\":\n client.say(channel, random.choice(responses))\n\n if message.strip('!#?.~') == 'bing':\n # i hate timezones\n bongs = bongzone.normalize(pytz.utc.localize(datetime.datetime.utcnow()).astimezone(bongzone)).hour\n #bongs = datetime.datetime.utcnow().hour\n if bongs > 12:\n bongs = bongs - 12\n\n client.say(channel, ' '.join(['bong' for x in range(0, bongs)])) \n" }, { "alpha_fraction": 0.6225680708885193, "alphanum_fraction": 0.6342412233352661, "avg_line_length": 35.71428680419922, "blob_id": "5ef8d23d6635fdb4e8eb788a794616a0e30b4967", "content_id": "e0b3aa9cc12bfcd279e2dfb61509ddaaa595f4e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 257, "license_type": "no_license", "max_line_length": 56, "num_lines": 7, "path": "/plugins/example.py", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "def irc_public (client, user, channel, message):\n # log messages to stdout\n print \"<{0}:{1}> {2}\".format(user, channel, message)\n\n # send a message to the server\n if message.startswith(\"you're a big guy\"):\n client.say(channel, \"for you\")\n" }, { "alpha_fraction": 0.6055351495742798, "alphanum_fraction": 0.6162053942680359, "avg_line_length": 36.48749923706055, "blob_id": "5982a488b08595843bc9f3798e86a0b4ab564dc5", "content_id": "3a783c593dc47b7a2deaca9e114c7911dc40aa88", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2999, "license_type": "no_license", "max_line_length": 183, "num_lines": 80, "path": "/plugins/remind.py", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "import time\nimport datetime\nimport humanize\nimport parsedatetime\nimport random\nimport re\n\nfrom modules import util\nfrom collections import defaultdict\n\nreminders = defaultdict(list)\npdt = parsedatetime.Calendar()\nlinkscrubber = 
re.compile(r'\\bhttps?:\\/\\/.+?\\s', re.I)\n\ndef init ():\n global reminders # FIXME: PUKE\n reminders = data_writer.get(\"reminders.db\")\n\ndef client_connected (client):\n # this runs on every connect, but it's okay since we get a new client object every time \n global reminders # FIXME: PUKE\n\n # purge reminders that have passed\n # FIXME: be more intelligent about this\n if client.server in reminders:\n reminders[client.server] = [ r for r in reminders[client.server] if schedule_reminder(client, r) ]\n\n save(client)\n\ndef save (client):\n data_writer.add(\"reminders.db\", dict(reminders))\n\ndef schedule_reminder (client, reminder):\n nick, t, channel, remindtime, remindmsg = reminder\n dtime = datetime.datetime.fromtimestamp(remindtime) + datetime.timedelta(microseconds=999999)\n reminddelta = remindtime - time.time()\n \n if reminddelta > 0: # only in the future\n client.schedule(reminddelta, client.say, channel, \"{0}: Sent {1}: <{2}> {3}\".format(\n t, humanize.naturaltime(dtime - datetime.datetime.now()), nick, remindmsg\n ))\n\n return True\n\n return False\n\ndef irc_public (client, hostmask, channel, message):\n nick, user, host = util.ircmask_split(hostmask)\n \n if message.startswith('!remind'):\n _, target, msg = message.split(' ', 2)\n dmsg = msg.strip()\n #dmsg, remindmsg = msg.split(':', 1)\n\n t = target.lower()\n \n if t == \"me\":\n t = nick.lower()\n\n matches = pdt.nlp(' '.join([x for x in dmsg.split() if not x.startswith('http')]))\n if matches != None:\n dtime, flags, spos, epos, mtext = matches[0] # first matched date-like object\n\n # convert into unix timestamp FIXME: probably blows up when timezone/DST\n remindtime = time.mktime(dtime.timetuple())\n remindmsg = (msg[:spos] + msg[epos:]).strip()\n\n if remindmsg in ['oven', 'stove', 'microwave']:\n remindmsg = \"BEEP\" * random.randint(5, 8)\n\n reminder = (nick, t, channel, remindtime, remindmsg)\n if schedule_reminder(client, reminder):\n reminders[client.server].append(reminder)\n save(client)\n client.say(channel, services['phrases'].format('success', \"I'll remind {0} {1}!\".format(t, humanize.naturaltime(dtime + datetime.timedelta(microseconds=999999)))))\n #client.msg(channel, \"Okay, I'll remind {0} {1}!\".format(t, humanize.naturaltime(dtime + datetime.timedelta(microseconds=999999))))\n else:\n client.say(channel, \"I'm not a time traveler!\")\n else:\n client.say(channel, \"Sorry, I didn't catch that....\")\n" }, { "alpha_fraction": 0.510004460811615, "alphanum_fraction": 0.5206758379936218, "avg_line_length": 42.25, "blob_id": "b1b0b1d0040bdfdc732eb7d5bbeb7a5da3ac2391", "content_id": "78863536b21b950db2c9e0dc5a298d9ff71eb760", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2249, "license_type": "no_license", "max_line_length": 139, "num_lines": 52, "path": "/plugins/admin.py", "repo_name": "NightKev/suikabot-py", "src_encoding": "UTF-8", "text": "from modules import util\n\ndef irc_public (client, hostmask, channel, message):\n nick, user, host = util.ircmask_split(hostmask)\n if client.access_list.check(hostmask, client.access_list.LEVEL_OWNER):\n if message.startswith('!'):\n args = message.split(' ')\n cmd = args.pop(0)\n \n if cmd == '!access':\n client.access_list.add(args[0], int(args[1]))\n client.say(channel, 'Added mask {0} with level {1}'.format(args[0], args[1]))\n\n if cmd == '!join':\n client.join(args[0])\n \n if cmd == '!leave':\n client.leave(channel)\n \n if cmd == '!reload':\n errs = client.plugins.reload()\n # 
HMMMMMMMMMMMMM something might need this, breaks reminders atm\n #client.dispatch_to_plugins(\"client_connected\")\n errstring = '{0} Error(s)'.format(len(errs))\n if errs: errstring += ' | ' + ', '.join(['{0}({2}): {1}'.format(p, e, l) for p, e, l in errs[:2]])\n\n client.say(channel, 'Plugins Reloaded! ({0})'.format(errstring))\n \n if message.startswith('!alias'):\n message = util.stripFormatting(message)\n args = message.split(' ')[1:]\n cmd, target = args[0:2]\n\n if cmd == 'list':\n client.say(channel, 'Aliases for {0}: {1}'.format(target, ' '.join([a for a in client.alias_map.get_aliases(target.lower())])))\n\n if client.access_list.check(hostmask, client.access_list.LEVEL_OWNER): #or \\\n #nick.lower() in client.alias_map.get_aliases(target.lower()):\n if cmd == 'add':\n alias = args[2]\n success = client.alias_map.add(target, alias)\n\n if success:\n client.say(channel, 'Alias ({0} <-> {1}) successfully added!'.format(target, alias))\n else:\n client.say(channel, 'An alias for {0} already exists.'.format(target, alias))\n \n if cmd == 'remove':\n success = client.alias_map.remove(target)\n\n if success:\n client.say(channel, 'Successfully removed alias for {0}.'.format(target))\n" } ]
10
iheartradio/pipeline
https://github.com/iheartradio/pipeline
fb1e53ae60324dabb5be46f1a9ba80b82cd09527
7e1ea3ffd668c76e4aec73d6dae63789161f8feb
91983f2011c03ed35a2248bc1c2f29b267efef6d
refs/heads/master
2022-10-05T16:10:46.427581
2022-09-20T16:27:49
2022-09-20T16:27:49
37,540,762
0
0
null
2015-06-16T16:02:48
2021-11-29T22:24:08
2022-09-20T16:27:49
Python
[ { "alpha_fraction": 0.6200854778289795, "alphanum_fraction": 0.6200854778289795, "avg_line_length": 32.91304397583008, "blob_id": "27cace7fe3f511da96bf3c9a7ccc428bd4b4322f", "content_id": "45a8fc1668cfdd280db08c39b7d45e35be1b6923", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2340, "license_type": "no_license", "max_line_length": 75, "num_lines": 69, "path": "/pipeline/diagrams.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "\"\"\"Generates Bindings Diagram.\"\"\"\n\nfrom diagrams import Cluster, Diagram, Edge\nfrom diagrams.onprem.container import Docker\nfrom diagrams.onprem.queue import RabbitMQ\n\n\ndef _generate_exchange_cluster(\n service_node: Docker,\n node_mapping: dict[str, str],\n direction: str\n):\n \"\"\"Generate a cluster node.\n\n Args:\n service_node: Node representing the main service.\n node_mapping: Mapping of exchange to routing key.\n direction: Incoming, or outcoming.\n\n \"\"\"\n with Cluster(direction):\n for node_name, routing_key in node_mapping.items():\n edge: Edge = Edge(color='black', label=routing_key)\n if direction == 'Incoming':\n exchange_node: RabbitMQ = RabbitMQ(node_name)\n exchange_node >> edge >> service_node\n elif direction == 'Outgoing':\n exchange_node: Docker = Docker(node_name)\n service_node >> edge >> exchange_node\n\n\ndef generate_bindings_diagram(\n settings: dict, *, filename: str = 'docs/images/bindings'\n) -> None:\n \"\"\"Generate bindings diagram for specified in settings dict.\n\n Args:\n settings: Dictionary of settings.\n filename: Location of where to write file.\n\n \"\"\"\n service_name: str = settings.DIAGRAMS_SERVICE_NAME\n with Diagram(\n f'{service_name.upper()} BINDINGS',\n direction='LR',\n filename=filename):\n service_node: Docker = Docker(service_name)\n\n # Draw incoming exchanges and routing keys\n _generate_exchange_cluster(\n service_node, settings.DIAGRAMS_INCOMING_EXCHANGES, 'Incoming')\n\n # Really only need to show this if it's a topic exchange.\n # Used when connecting to multiple outgoing queues\n outgoing_exchange_node: RabbitMQ = None\n if settings.DIAGRAMS_OUTGOING_EXCHANGE:\n outgoing_exchange_node = RabbitMQ(\n settings.DIAGRAMS_OUTGOING_EXCHANGE)\n service_node >> Edge(\n color='black', label='#') >> outgoing_exchange_node\n next_node = outgoing_exchange_node\n else:\n next_node = service_node\n\n # Draw outgoing queues with routing keys\n _generate_exchange_cluster(\n next_node,\n settings.DIAGRAMS_OUTGOING_QUEUES,\n 'Outgoing')\n" }, { "alpha_fraction": 0.2935926020145416, "alphanum_fraction": 0.6595473289489746, "avg_line_length": 26.04310417175293, "blob_id": "6409302f6203580a2cf003b6f1e5cfbcd3e35c69", "content_id": "9f5f8939231de68d79f0f9a70cfb0d522e1f120e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3137, "license_type": "no_license", "max_line_length": 60, "num_lines": 116, "path": "/tests/test_misc.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "\"\"\"Miscellaneous tests.\"\"\"\n\nimport pytest\n\nimport pipeline\n\n\[email protected]('isrc, expected', (\n ('12-345-67-89012', '123456789012'),\n ('21-098-76-54321', '210987654321'),\n))\ndef test_normalize_isrc(isrc, expected):\n \"\"\"Test that ISRCs with dashes are transformed.\"\"\"\n actual = pipeline.normalize_isrc(isrc)\n assert actual == expected\n\n\[email protected]('isrc', (\n '123456789012',\n '210987654321',\n))\ndef test_normalize_isrc_unchanged(isrc):\n 
\"\"\"Test that ISRCs without dashes are unchanged.\"\"\"\n assert isrc == pipeline.normalize_isrc(isrc)\n\n\[email protected]('isrc, expected', (\n ('qm-9k-3120-0284', 'QM9K31200284'),\n ('qm9k31200284', 'QM9K31200284'),\n))\ndef test_upper_cased_normalize_isrc(isrc, expected):\n \"\"\"Test that ISRCs with dashes are transformed.\"\"\"\n actual = pipeline.normalize_isrc(isrc)\n assert actual == expected\n\n\[email protected]('upc, expected', (\n ('00616892587125', '616892587125'),\n ('00076743106828', '076743106828'),\n ('00044003728271', '044003728271'),\n ('00802097028420', '802097028420'),\n ('00061528101723', '061528101723'),\n ('00619061375226', '619061375226'),\n ('00044003727151', '044003727151'),\n ('00035561301228', '035561301228'),\n ('00803467000923', '803467000923'),\n ('00619061218523', '619061218523'),\n ('00856811001800', '856811001800'),\n ('00823674300234', '823674300234'),\n ('00775020927629', '775020927629'),\n ('00044003723795', '044003723795'),\n ('00821826000162', '821826000162'),\n ('00619061368020', '619061368020'),\n ('00053361303525', '053361303525'),\n ('00805386002729', '805386002729'),\n ('00053361309428', '053361309428'),\n ('00856811001794', '856811001794'),\n))\ndef test_normalize_upc_leading_zeros(upc, expected):\n \"\"\"Test that UPCs with leading zeros are transformed.\"\"\"\n actual = pipeline.normalize_upc(upc)\n assert actual == expected\n\n\[email protected]('upc', (\n '80330753510997',\n '80330753513226',\n '80330753510362',\n '80330753510447',\n '80330753510317',\n '80330753510355',\n '80330753510300',\n '80330753510430',\n '80330753510324',\n '80330753510348',\n '80330753511376',\n '80330753510539',\n '80330753510157',\n '80330753510423',\n '80330753510607',\n '80330753510461',\n '80330753510577',\n '80330753510188',\n '80330753510393',\n '80330753510560',\n))\ndef test_normalize_upc_too_long_unchanged(upc):\n \"\"\"Test that UPCs that are too long are unchanged.\"\"\"\n assert upc == pipeline.normalize_upc(upc)\n\n\[email protected]('upc', (\n '018736260971',\n '616822105825',\n '889845354086',\n '111118824126',\n '634479093388',\n '889176232091',\n '859715025446',\n '702730622643',\n '800684021212',\n '811868219042',\n '785688016726',\n '889845213406',\n '859712876850',\n '885767949478',\n '775957086963',\n '803057002429',\n '811868646121',\n '829410132374',\n '887516926396',\n '642738977492',\n))\ndef test_normalize_upc_valid_unchanged(upc):\n \"\"\"Test that valid UPCs are unchanged.\"\"\"\n assert upc == pipeline.normalize_upc(upc)\n" }, { "alpha_fraction": 0.6494284272193909, "alphanum_fraction": 0.6929776668548584, "avg_line_length": 23.013071060180664, "blob_id": "99a6411d4e151cb59c3fbb9efd0e5ef6574f843b", "content_id": "37c31bd1451891e72ad481f65bb440cf5b3ed66f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 3674, "license_type": "no_license", "max_line_length": 79, "num_lines": 153, "path": "/docs/changes.rst", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "=========\nChangelog\n=========\n\nVersion 2.7.0\n==============\n\n- Adding an ``action`` key in the offers schema, this specifies the\n action [``upsert`` or ``takedown``] at offers level.\n\nVersion 2.6.0\n==============\n\n- Support for generation of service level RabbitMQ bindings diagrams.\n\nVersion 2.5.0\n==============\n\nReleased 2020-03-25\n\n- Adding an optional ``artists`` key in the artist schema,\n this lists the individual artists if present in the DDEX delivery.\n\nVersion 
2.4.0\n==============\n\nReleased 2019-02-22\n\n- Support for ``exchange_name`` added\n\nVersion 2.3.0\n==============\n\nReleased 2019-02-13\n\n- Update ``genre`` to be ``Optional``\n\nVersion 2.2.0\n==============\n\nReleased 2018-09-13\n\n- Update ``copyright`` to be ``Optional``\n\nVersion 2.1.1\n==============\n\nReleased 2018-07-24\n\n- Add Uppercase to ISRC normalization\n\nVersion 2.1.0\n==============\n\nReleased 2018-07-19\n\n- Reduce bread crumb message size\n\nVersion 2.0.0\n==============\n\nReleased 2018-07-05\n\n- Update the message schema to a derivative of schema.org's MusicAlbum_\n- Add ``fanout`` to generate unique a ``job_id`` for outgoing messages sent by\n services that send multiple outgoing messages for each incoming one\n- Drop support for information related to physical products\n- Add ``normalize_isrc`` and ``normalize_upc`` to handle transforming raw\n identifiers into their normalized formats\n- Add ``purge`` schema to handle Sony purge deliveries\n- Accept empty list as the ``offers`` field in ``product``\n- Add ``Optional`` track level ``grid`` validation\n- Swapped ``offers`` date parsing validation from\n ``voluptuous.Datetime`` -> ``python-dateutil.parser``\n- Add ``None`` as acceptable for Optional fields in the product document schema\n- Handle ``None`` in OffsetAwareDatetime\n\nVersion 1.0.0\n=============\n\nReleased 2016-12-20\n\n- Add ``prepare_incoming_message`` to support the new common message structure\n- Rename ``prepare_message`` to ``prepare_outgoing_message`` and remove the\n arguments that are no longer needed with the changes to the common message\n structure (*backwards incompatible*)\n- ``send_message`` no longer accepts the ``event`` argument (*backwards\n incompatible*)\n\n\nVersion 0.4.0\n=============\n\nReleased 2016-09-21\n\n- Make ``duration`` a required field for products\n- Add ``routing_key`` argument to ``send_message``\n\nVersion 0.3.0\n=============\n\nReleased 2016-03-14\n\n- Make ``ignore_provider``, ``send_message``, and ``send_error`` into\n coroutines\n- Add ``validate_schema`` to handle validating document schemas\n- Add additional fields to media schema for audio files\n- Remove settings module (*Backward Incompatible*)\n- Add ``jsonify`` and ``nosjify`` coroutines for serializing and deserializing\n messages\n- Serialize outgoing messages in ``send_message`` and ``send_error``\n- Add ``takedown`` and ``delivery`` schemas\n- Remove ``windows_drm_id``\n- Make media optional\n- Set track bundle counts\n- Make sub label names optional\n\nVersion 0.2.0\n=============\n\nReleased 2015-11-19\n\n- Add ``prepare_message`` to handle formatting messages with the common message\n structure\n- Add ``send_message`` to handle sending messages through the specified\n producer\n- Add function to iterate over schema validation error messages\n- Add ``send_error`` to handle sending error messages through the specified\n producer\n- Add settings module to provide settings available to all pipeline services\n\nVersion 0.1.2\n=============\n\nReleased 2015-08-17\n\n- Move release to track bundle\n\nVersion 0.1.1\n=============\n\nReleased 2015-08-13\n\n- Remove unsupported usage rules\n\nVersion 0.1.0\n=============\n\nReleased 2015-07-31\n\n- Initial release\n\n.. 
_MusicAlbum: https://schema.org/MusicAlbum\n" }, { "alpha_fraction": 0.709172248840332, "alphanum_fraction": 0.7099179625511169, "avg_line_length": 29.13483238220215, "blob_id": "8c3677150cb74556ced00c137e2663923d661791", "content_id": "e5a89bed5d66cead6ad1d3c78b8a13c1219a8321", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2682, "license_type": "no_license", "max_line_length": 68, "num_lines": 89, "path": "/tests/test_ignore_provider.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "\"\"\"Test ignore_provider.\"\"\"\n\nimport pytest\n\nfrom henson.exceptions import Abort\nfrom pipeline import ignore_provider\n\nTEST_PROVIDER = 'testing'\nTEST_MESSAGE = {'provider': TEST_PROVIDER}\n\n\[email protected]\nasync def test_empty_lists(test_app):\n \"\"\"Test ignore_provider with empty lists of providers.\"\"\"\n test_app.settings['INCLUDED_PROVIDERS'] = []\n test_app.settings['EXCLUDED_PROVIDERS'] = []\n\n actual = await ignore_provider(test_app, TEST_MESSAGE)\n assert actual\n\n\[email protected]\nasync def test_excluded(test_app):\n \"\"\"Test ignore_provider with excluded providers.\"\"\"\n test_app.settings['INCLUDED_PROVIDERS'] = []\n test_app.settings['EXCLUDED_PROVIDERS'] = [TEST_PROVIDER + '1']\n\n actual = await ignore_provider(test_app, TEST_MESSAGE)\n assert actual\n\n\[email protected]\nasync def test_excluded_ignore(test_app):\n \"\"\"Test ignore_provider ignores with excluded providers.\"\"\"\n test_app.settings['INCLUDED_PROVIDERS'] = []\n test_app.settings['EXCLUDED_PROVIDERS'] = [TEST_PROVIDER]\n\n with pytest.raises(Abort):\n await ignore_provider(test_app, TEST_MESSAGE)\n\n\[email protected]\nasync def test_included(test_app):\n \"\"\"Test ignore_provider with included providers.\"\"\"\n test_app.settings['INCLUDED_PROVIDERS'] = [TEST_PROVIDER]\n test_app.settings['EXCLUDED_PROVIDERS'] = []\n\n actual = await ignore_provider(test_app, TEST_MESSAGE)\n assert actual\n\n\[email protected]\nasync def test_included_ignore(test_app):\n \"\"\"Test ignore_provider ignores with included providers.\"\"\"\n test_app.settings['INCLUDED_PROVIDERS'] = [TEST_PROVIDER + '1']\n test_app.settings['EXCLUDED_PROVIDERS'] = []\n\n with pytest.raises(Abort):\n await ignore_provider(test_app, TEST_MESSAGE)\n\n\[email protected]\nasync def test_included_and_excluded(test_app):\n \"\"\"Test ignore_provider with included and excluded providers.\"\"\"\n test_app.settings['INCLUDED_PROVIDERS'] = [TEST_PROVIDER]\n test_app.settings['EXCLUDED_PROVIDERS'] = [TEST_PROVIDER]\n\n actual = await ignore_provider(test_app, TEST_MESSAGE)\n assert actual\n\n\[email protected]\nasync def test_no_providers(test_app):\n \"\"\"Test ignore_provider with no providers.\"\"\"\n test_app.settings.pop('INCLUDED_PROVIDERS', None)\n test_app.settings.pop('EXCLUDED_PROVIDERS', None)\n\n actual = await ignore_provider(test_app, TEST_MESSAGE)\n assert actual\n\n\[email protected]\nasync def test_none(test_app):\n \"\"\"Test ignore_provider with none providers.\"\"\"\n test_app.settings['INCLUDED_PROVIDERS'] = None\n test_app.settings['EXCLUDED_PROVIDERS'] = None\n\n actual = await ignore_provider(test_app, TEST_MESSAGE)\n assert actual\n" }, { "alpha_fraction": 0.5140449404716492, "alphanum_fraction": 0.5308988690376282, "avg_line_length": 18.77777862548828, "blob_id": "cafa3dfeca6995a7931e39b321a5b2d9106e534c", "content_id": "df87c2ed245f0d689b83906fad3c09080c0c224b", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 356, "license_type": "no_license", "max_line_length": 46, "num_lines": 18, "path": "/setup.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "from setuptools import find_packages, setup\n\nsetup(\n name='pipeline',\n version='2.7.0',\n packages=find_packages(exclude=['tests']),\n install_requires=[\n 'Henson>=0.5.0',\n 'diagrams',\n 'Pygments',\n 'python-dateutil',\n 'python-decouple',\n 'voluptuous',\n ],\n tests_require=[\n 'tox',\n ],\n)\n" }, { "alpha_fraction": 0.6850489974021912, "alphanum_fraction": 0.6850489974021912, "avg_line_length": 21.66666603088379, "blob_id": "3f99c2d05cc0baed00ff30b68274a7fb1aba549b", "content_id": "95b0f6ec848465e3d998a8a9f82b1c4a2d590eb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 816, "license_type": "no_license", "max_line_length": 78, "num_lines": 36, "path": "/docs/schemas.rst", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "=======\nSchemas\n=======\n\npipeline provides validation schemas for many common data structures used by\nservices. To use a schema to validate a dictionary::\n\n from pipeline.schema import artist\n\n doc = {'name': 'pipeline artist'}\n artist(doc)\n\nIn order to handle validation errors yourself, you will need to wrap the check\nin a try/except block::\n\n from pipeline.schema import artist, MultipleInvalid\n\n doc = {'invalid-field': 'invalid'}\n try:\n artist(doc)\n except MultipleInvalid:\n logging.error('The artist was invalid.')\n\nAvailable schemas\n=================\n\n.. automodule:: pipeline.schema\n :members:\n :exclude-members: iter_errors, ValidationError\n\nError handling\n==============\n\n.. autoclass:: pipeline.schema.ValidationError\n\n.. 
autofunction:: pipeline.schema.iter_errors\n" }, { "alpha_fraction": 0.6167247295379639, "alphanum_fraction": 0.6271777153015137, "avg_line_length": 21.076923370361328, "blob_id": "aa2f127ed23883f920e545eb3a95145ec70b2b60", "content_id": "edc4a3e6ced19c85cbd117d439d3275207df4a54", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 574, "license_type": "no_license", "max_line_length": 52, "num_lines": 26, "path": "/tests/test_jsonify.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "\"\"\"Test jsonify.\"\"\"\n\nfrom collections import namedtuple\n\nimport pytest\n\nfrom pipeline import jsonify, nosjify\n\nMessage = namedtuple('Message', ('body',))\n\n\[email protected]\[email protected]('expected', (\n {'a': 1, 'b': 'c'},\n {'a': {'b': {'c': 'd'}}},\n [1, 2, 3, 4],\n 'a',\n 1,\n))\nasync def test_jsonify(test_app, expected):\n \"\"\"Test jsonify.\"\"\"\n intermediate = await jsonify(test_app, expected)\n message = Message(intermediate)\n actual = await nosjify(test_app, message)\n assert actual == expected\n assert actual != intermediate\n" }, { "alpha_fraction": 0.6532567143440247, "alphanum_fraction": 0.6551724076271057, "avg_line_length": 13.472222328186035, "blob_id": "4fc20cb0d7a66dae682f9d65b3e26c107e839e8b", "content_id": "f9ff96f60c29746f5c41595c4786353c2cf9cf81", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 522, "license_type": "no_license", "max_line_length": 79, "num_lines": 36, "path": "/docs/index.rst", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "========\npipeline\n========\n\npipeline is a library containing common utilities used by the components of the\nIngestion Pipeline.\n\nInstallation\n============\n\nIf you are using the internal package index server, you can install pipeline\nusing Pip::\n\n $ pip install pipeline\n\nOtherwise, pipeline can be installed from source::\n\n $ python setup.py install\n\nContents:\n\n.. 
toctree::\n :maxdepth: 2\n\n api\n schemas\n changes\n\n\n\nIndices and tables\n==================\n\n* :ref:`genindex`\n* :ref:`modindex`\n* :ref:`search`\n\n" }, { "alpha_fraction": 0.6765920519828796, "alphanum_fraction": 0.6794174909591675, "avg_line_length": 30.95138931274414, "blob_id": "cc5743ffcc0478108d9bf8810bb94b5161aaba01", "content_id": "ae1a378f8f1b1ec661026d2a6a0512837db50f75", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4601, "license_type": "no_license", "max_line_length": 78, "num_lines": 144, "path": "/tests/test_messages.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "\"\"\"Test message-related functionality.\"\"\"\n\nfrom collections import namedtuple\nfrom datetime import datetime\nimport uuid\n\nimport pytest\n\nfrom pipeline import (\n fanout,\n nosjify,\n prepare_incoming_message,\n prepare_outgoing_message,\n send_error,\n send_message,\n)\n\nDATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'\n\nMessage = namedtuple('Message', ('body',))\n\n\[email protected]\nasync def test_events_is_added(test_app):\n \"\"\"Test that the events list is added to messages without it.\"\"\"\n actual = await prepare_incoming_message(test_app, {})\n assert 'events' in actual\n\n\[email protected]\nasync def test_fanout_adds_ancestor_id(test_app):\n \"\"\"Test that fanout adds the original job_id as an ancestor_id.\"\"\"\n original = {'job_id': 1, 'ancestor_ids': []}\n result = await fanout(test_app, original)\n assert original['job_id'] in result['ancestor_ids']\n\n\[email protected]\nasync def test_fanout_does_not_change_original_message(test_app):\n \"\"\"Test that fanout doesn't change the original message.\"\"\"\n expected = 1\n original = {'job_id': expected, 'ancestor_ids': []}\n result = await fanout(test_app, original)\n assert original['job_id'] == expected\n\n\[email protected]\nasync def test_fanout_new_job_id(test_app):\n \"\"\"Test that fanout assigns a new job_id.\"\"\"\n original = {'job_id': 1, 'ancestor_ids': []}\n result = await fanout(test_app, original)\n assert result['job_id'] != original['job_id']\n\n\[email protected]\nasync def test_job_id_is_added(test_app):\n \"\"\"Test that a job id is added to messages without one.\"\"\"\n actual = await prepare_incoming_message(test_app, {})\n assert 'job_id' in actual\n\n\[email protected]\nasync def test_job_id_is_preserved(test_app):\n \"\"\"Test that existing job ids are preserved.\"\"\"\n actual = await prepare_incoming_message(test_app, {'job_id': 1})\n assert actual['job_id'] == 1\n\n\[email protected]\nasync def test_app_is_hoisted_to_previous_event(test_app):\n \"\"\"Test that the app is copied to the previous event.\"\"\"\n actual = await prepare_incoming_message(\n test_app, {'message': 1})\n assert actual['events'][-1]['app'] == test_app.name\n\n\[email protected]('key', (\n 'app',\n 'event_id',\n 'received_at',\n))\[email protected]\nasync def test_new_event_has_field(key, test_app):\n \"\"\"Test that the new event has the specified field.\"\"\"\n actual = await prepare_incoming_message(test_app, {})\n assert actual['events'][-1][key]\n\n\[email protected]\nasync def test_new_event_is_added(test_app):\n \"\"\"Test that a new event is added.\"\"\"\n actual = await prepare_incoming_message(\n test_app, {'events': [{}], 'message': ''})\n assert len(actual['events']) == 2\n\n\[email protected]\nasync def test_new_event_received_at_is_datetime(test_app):\n \"\"\"Test that the new event's received timestamp is a datetime.\"\"\"\n 
actual = await prepare_incoming_message(test_app, {})\n assert isinstance(datetime.strptime(\n actual['events'][-1]['received_at'], DATETIME_FORMAT), datetime)\n\n\ndef test_new_event_updated_at_is_datetime(test_app):\n \"\"\"Test that the new event's updated timestamp is a datetime.\"\"\"\n actual = prepare_outgoing_message({'events': [{}]})\n assert isinstance(datetime.strptime(\n actual['events'][-1]['updated_at'], DATETIME_FORMAT), datetime)\n\n\[email protected]\nasync def test_originated_at_is_datetime(test_app):\n \"\"\"Test that the initial timestamp is a datetime.\"\"\"\n actual = await prepare_incoming_message(test_app, {})\n assert isinstance(\n datetime.strptime(actual['originated_at'], DATETIME_FORMAT), datetime)\n\n\[email protected]\nasync def test_originated_at_is_preserved(test_app):\n \"\"\"Test that existing initial timestamps are preserved.\"\"\"\n actual = await prepare_incoming_message(test_app, {'originated_at': 1})\n assert actual['originated_at'] == 1\n\n\[email protected]\nasync def test_send_error(test_producer):\n \"\"\"Test that the provided message is sent.\"\"\"\n expected = {'message': 'test_message', 'events': [{}]}\n await send_error(expected, producer=test_producer)\n actual = await nosjify(None, Message(test_producer.sent_error))\n\n assert actual['message'] == expected['message']\n\n\[email protected]\nasync def test_send_message(test_producer):\n \"\"\"Test that the provided message is sent.\"\"\"\n expected = {'message': 'test_message', 'events': [{}]}\n await send_message(expected, producer=test_producer)\n actual = await nosjify(None, Message(test_producer.sent_message))\n\n assert actual['message'] == expected['message']\n" }, { "alpha_fraction": 0.6508469581604004, "alphanum_fraction": 0.6538317799568176, "avg_line_length": 27.634614944458008, "blob_id": "365dfc77416c716ccd96a1bfbfe6280ecd39c1a6", "content_id": "93965ffb1c15a8e7627a9113d9f0b8f66dbb1f7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13401, "license_type": "no_license", "max_line_length": 78, "num_lines": 468, "path": "/pipeline/schema.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "\"\"\"Schemas that can be used for validating common data types.\"\"\"\n\n# NOTE: The docstrings immediately following each schema are provided\n# solely for Sphinx's autodoc. They are not useful Python docstrings and\n# cannot be consumed by help() or a REPL.\n\nfrom collections import namedtuple\nimport dateutil.parser\nfrom functools import partial\n\nfrom henson.exceptions import Abort\nfrom voluptuous import (\n Any,\n Invalid,\n MultipleInvalid,\n Optional,\n Schema,\n TypeInvalid,\n)\n\n__all__ = ('iter_errors', 'validate_schema')\n\nSchemaAllRequired = partial(Schema, required=True)\n\nCOMMERCIAL_MODEL_TYPES = (\n 'AdvertisementSupportedModel',\n 'DeviceFeeModel',\n 'PayAsYouGoModel',\n 'RightsClaimModel',\n 'SubscriptionModel'\n)\n\nUSE_TYPES = (\n 'ConditionalDownload',\n 'iHeartCustom',\n 'NonInteractiveStream',\n 'OnDemandStream',\n 'PermanentDownload'\n)\n\n\nValidationError = namedtuple('ValidationError', 'error message value')\n\"\"\"A wrapper around a validation error.\n\nArgs:\n error (voluptuous.Error): The validation error.\n message (str): A friendly error message. If provided, more\n information than the message associated with ``error``.\n value: The value that failed validation. 
This will only be provided\n when the object being validated contained the field.\n\"\"\"\n\n\nclass CommercialModelTypeInvalid(Invalid):\n \"\"\"The value is not a valid commercialModelType.\n\n .. versionadded: 1.1.0\n \"\"\"\n\n\ndef CommercialModelType(value): # NOQA: N802\n \"\"\"Validate CommercialModelType.\n\n Args:\n value (str): The commercialModelType in the message.\n\n Returns:\n str: The same commercialModelType passed into the function.\n\n Raises:\n CommercialModelTypeInvalid: If the commercialModelTypes\n is not defined in COMMERCIAL_MODEL_TYPES.\n\n .. versionadded: 1.1.0\n\n \"\"\"\n if value not in COMMERCIAL_MODEL_TYPES:\n raise CommercialModelTypeInvalid(\n \"Expected one of '{0}', got '{1}'.\".format(\n \"', '\".join(COMMERCIAL_MODEL_TYPES), value))\n return value\n\n\ndef iter_errors(exc, data):\n \"\"\"Return a generator containing validation errors.\n\n Args:\n exc (voluptuous.MultipleInvalid): The exception raised when\n validating against a schema.\n data (dict): The document being validated.\n\n Yields:\n ValidationError: The error.\n\n \"\"\"\n # Get a copy of the original value so data can be reset in the loop.\n original = data\n\n # Loop through all the errors and yield the error message and the\n # value to which it refers.\n for error in exc.errors:\n data = None\n if isinstance(error, TypeInvalid):\n data = original\n\n # voluptuous provides a path of keys and indexes that can be\n # used to retrieve the value.\n for key_or_index in error.path:\n data = data.__getitem__(key_or_index)\n\n msg = '{}, got {}'.format(error, type(data).__name__)\n else:\n msg = str(error)\n\n yield ValidationError(error, msg, data)\n\n\nclass UseTypeInvalid(Invalid):\n \"\"\"The value is not a valid useType.\n\n .. versionadded: 1.1.0\n \"\"\"\n\n\ndef UseType(value): # NOQA: N802\n \"\"\"Validate useType.\n\n Args:\n value (list): The useTypes in the message.\n\n Returns:\n list: The same useTypes passed into the function.\n\n Raises:\n UseTypeInvalid: If one of the useTypes is not defined in\n ``USE_TYPES``.\n\n .. versionadded: 1.1.0\n\n \"\"\"\n for v in value:\n if v not in USE_TYPES:\n raise UseTypeInvalid(\"Expected one of '{0}', got '{1}'.\".format(\n \"', '\".join(USE_TYPES), v))\n return value\n\n\ndef OffsetAwareDatetime(value): # NOQA: N802\n \"\"\"Validate offset aware date time.\n\n Args:\n value (str): Date time.\n\n Returns:\n str: The same date time passed into the function.\n\n Raises:\n Invalid: If date time could not be parsed.\n\n .. 
versionadded: 1.1.0\n\n \"\"\"\n try:\n dateutil.parser.parse(value)\n return value\n except (TypeError, ValueError, OverflowError):\n raise Invalid('Could not parse date string: {}'.format(value))\n\n\ndef validate_schema(schema, message, logger=None):\n \"\"\"Validate a message against a schema.\n\n Args:\n schema (voluptuous.Schema): The schema against which to\n validate.\n message (dict): The message to validate.\n logger (Optional[logging.RootLogger]): An instance of a logger\n that, if provided, will be used to log the schema validation\n errors.\n\n Returns:\n dict: The validated message upon successful validation.\n\n \"\"\"\n try:\n return schema(message)\n except MultipleInvalid as e:\n if logger is not None:\n logger.error(\n 'schema.invalid', errors=list(iter_errors(e, data=message)))\n raise Abort('schema.invalid', message)\n\n\n# shared sub-types\nartist = SchemaAllRequired({\n 'name': str,\n Optional('artists'): [Any(\n None, Schema({\n 'artist_name': str,\n 'artist_role': str,\n 'sequence_number': Any(None, int),\n }))],\n Optional('url'): Any(None, str),\n})\n\"\"\"Schema to validate an artist.\n\nArgs:\n name (str): The artist's name.\n url (Optional[str]): The artist's URL.\n\"\"\"\n\nparticipant = SchemaAllRequired({\n 'name': str,\n 'role': str,\n})\n\"\"\"Schema to validate a participant.\n\nArgs:\n name (str): The participant's name.\n role (str): The participant's role on the track.\n\"\"\"\n\ncopyright = SchemaAllRequired({\n Optional('text'): Any(None, str),\n Optional('year'): Any(None, int),\n})\n\"\"\"Schema to validate a copyright.\n\nArgs:\n text (Optional(str)): The full copyright text.\n year (Optional[int]): The copyright year.\n\"\"\"\n\n# TODO: Given the number of fields that are optional, this should\n# probably be split up into two separate schemas, one for audio files\n# and one for images. They can inherit from media like track and\n# track_bundle inherit from product.\nmedia = SchemaAllRequired({\n Optional('bitrate'): Any(None, str),\n Optional('channel'): Any(None, int),\n Optional('codec'): Any(None, str),\n Optional('count'): Any(None, int),\n Optional('number'): Any(None, int,),\n Optional('sampleRate'): Any(None, str),\n 'source': str,\n})\n\"\"\"Schema to validate media.\n\n``count`` and ``number`` are more likely to be provided for images than\nfor audio files. 
``bitrate``, ``channel``, ``codec``, and\n``sampleRate`` are more likely for audio files.\n\nArgs:\n bitrate (Optional[str]): The bitrate of the media file.\n channel (Optional[int]): The channel of the media file.\n codec (Optional[str]): The codec of the media file.\n count (Optional[int]): The total number of media files.\n number (Optional[int]): The number of the media file.\n sampleRate (Optional[str]): The sample rate of the media file.\n source (str): The location of the media file.\n\"\"\"\n\n# provider-related schemas\nsub_label = SchemaAllRequired({\n Optional('name'): Any(None, str),\n 'countries': [str],\n})\n\"\"\"Schema to valid a sub label.\n\nArgs:\n name (Optional[str]): The sub label's name.\n countries (list): A list of countries.\n\"\"\"\n\nlabel = SchemaAllRequired({\n 'name': str,\n 'subLabels': [sub_label],\n})\n\"\"\"Schema to validate a label.\n\nArgs:\n name (str): The label's name.\n subLabels (list): A list of sub labels.\n\"\"\"\n\nprovider = SchemaAllRequired({\n 'name': str,\n 'labels': [label],\n})\n\"\"\"Schema to validate a provider.\n\nArgs:\n name (str): The provider's name.\n labels (list): A list of labels.\n\"\"\"\n\noffer = SchemaAllRequired({\n 'action': str,\n 'commercialModelType': CommercialModelType,\n 'licensee': str,\n Optional('price'): Any(None, str),\n 'territoryCode': str,\n 'useType': UseType,\n 'validFrom': Any(None, OffsetAwareDatetime),\n 'validThrough': Any(None, OffsetAwareDatetime),\n})\n\"\"\"Schema to validate an offer.\n\nArgs:\n commercialModelType (CommercialModelType): The commercial model\n between the label or aggregator and their retail partners.\n licensee (str): The licensee for the offer.\n price (Optional[str]): The price used in the territory.\n territoryCode (str): The country code representing the territory.\n useType (UseType): The types of usage that are allowed.\n validFrom (Union[str, None]): The start date of the item's validity\n in ISO-8601 format.\n validThrough (Union[str, None]): The end date of the item's validity\n in ISO-8601 format.\n\"\"\"\n\n\n# products\nproduct = SchemaAllRequired({\n 'action': 'upsert',\n 'amwKey': str,\n 'artist': artist,\n 'copyright': copyright,\n 'duration': str,\n 'explicitLyrics': bool,\n Optional('genre'): Any(None, str),\n Optional('id'): Any(None, int),\n Optional('internalId'): Any(None, str),\n Optional('media'): Any(None, media),\n 'name': str,\n Optional('offers'): [offer],\n 'provider': provider,\n Optional('publisher'): Any(None, str),\n Optional('version'): Any(None, str),\n})\n\"\"\"Schema to validate a product.\n\nArgs:\n action (str): The action to be taken on the product specified by\n ``amwKey``. 
Must be ``'upsert'``.\n amwKey (str): The product's unique identifier.\n artist (artist): The product's artist.\n copyright (copyright): The product's copyright.\n duration (str): The product's duration in ISO-8601 format.\n explicitLyrics (bool): Whether the product contains explicit\n lyrics.\n genre (str): The product's genre.\n id (Optional(int)): The product's internal id in the Ingestion database.\n internalId (Optional[int]): The track's internal identifier.\n media (media): Media files associated with the product.\n name (str): The product's name.\n offers (list): A list of offers for the product.\n provider (provider): The product's provider.\n publisher (Optional[str]): The product's publisher.\n version (Optional[str]): The product's version.\n\"\"\"\n\ntrack_schema = product.schema.copy()\ntrack_schema.update({\n Optional('alternativeName'): Any(None, str),\n Optional('genre'): Any(None, str),\n Optional('grid'): Any(None, str),\n 'index': int,\n 'isrcCode': str,\n Optional('isrcCodeRaw'): Any(None, str),\n 'number': int,\n Optional('participants'): Any(None, [participant]),\n 'volume': int,\n})\n\ntrack = SchemaAllRequired(track_schema)\n\"\"\"Schema to validate a track.\n\nThis schema is an extension of the :data:`product` schema.\n\nArgs:\n alternativeName (Optional[str]): The track's extended name.\n grid (Optional[str]): The track's Global Release Identifier.\n index (int): The track's index on the track bundle. This is often,\n but not always, based on the ``number``.\n isrcCode (str): The track's International Standard Recording Code.\n isrcCodeRaw (Optional(str)): The raw version of the track's\n International Standard Recording Code.\n number (int): The track's number on the track bundle.\n volume (int): The number of the track bundle's volumes on which the\n track appears.\n\"\"\"\n\ntrack_bundle_schema = product.schema.copy()\ntrack_bundle_schema.update({\n 'albumReleaseType': str,\n Optional('catalogNumber'): Any(None, str),\n Optional('ean'): Any(None, str),\n Optional('grid'): Any(None, str),\n Optional('icpn'): Any(None, str),\n 'numTracks': int,\n 'numVolumes': int,\n Optional('productCode'): Any(None, str),\n 'releasedEvent': OffsetAwareDatetime,\n 'tracks': [track],\n 'upc': str,\n Optional('upcRaw'): Any(None, str),\n})\n\ntrack_bundle = SchemaAllRequired(track_bundle_schema)\n\"\"\"Schema to validate a track bundle.\n\nThis schema is an extension of the :data:`product` schema.\n\nArgs:\n albumReleaseType (str): The product type.\n catalogNumber (Optional[str]): The track bundle's catalog number.\n ean (Optional[str]): The track bundle's International Article\n Number.\n grid (Optional[str]): The track bundle's Global Release Identifier.\n icpn (Optional[str]): The track bundle's International Code Product\n Number.\n numTracks (int): The number of tracks.\n numVolumes (int): The number of volumes that make up the\n track bundle.\n productCode (Optional[str]): The track bundle's product code.\n releasedEvent (Date): The product's release date.\n tracks (list): A list of tracks.\n upc (str): The track bundle's Universal Product Code.\n upcRaw (Optional(str)): The raw version of the track bundle's\n Universal Product Code.\n\"\"\"\n\ntakedown = SchemaAllRequired({\n 'action': 'takedown',\n 'amwKey': str,\n}, extra=True)\n\"\"\"Schema to validate a product takedown.\n\nArgs:\n action (str): The action to be taken on the product specified by\n ``amwKey``. 
Must be ``'takedown'``.\n amwKey (str): The product's amwKey.\n\"\"\"\n\npurge = SchemaAllRequired({\n 'action': 'purge',\n 'grid': str,\n 'icpn': str,\n}, extra=True)\n\"\"\"Schema to validate a product purge.\n\nArgs:\n action (str): Must be 'purge', indicates which action persist should take.\n grid (str): the GRiD for the product to be purged.\n icpn (str): the ICPN for the product to be purged.\n\"\"\"\n\ndelivery = Any(track_bundle, takedown, purge)\n\"\"\"Schema to validate a partner delivery.\n\nContent must match the schema of either ``takedown`` or\n``track_bundle``.\n\"\"\"\n\n\ndel track_schema\ndel track_bundle_schema\n" }, { "alpha_fraction": 0.6323071122169495, "alphanum_fraction": 0.6367942094802856, "avg_line_length": 27.859712600708008, "blob_id": "0704c7f7fe666ab9eed1ded9dce3472c85dc2652", "content_id": "ca8672e382600e0185b1df6f4a80031e3210a162", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8023, "license_type": "no_license", "max_line_length": 190, "num_lines": 278, "path": "/pipeline/__init__.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "\"\"\"Common utilities for the Ingestion Pipeline.\"\"\"\n\nfrom copy import deepcopy\nfrom datetime import datetime\nimport json\nimport uuid\n\nfrom henson.exceptions import Abort\n\n__all__ = ('fanout', 'ignore_provider', 'jsonify', 'normalize_isrc', 'normalize_upc', 'nosjify', 'prepare_incoming_message', 'prepare_outgoing_message', 'send_error', 'send_message') # noqa\n\n\nasync def fanout(app, message):\n \"\"\"Return a message fanned out from the original message.\n\n Messages that are fanned out from other messages will receive a new\n ``job_id``. The original message's ``job_id`` will be added to the\n list of ancestors.\n\n Args:\n app (henson.base.Application): The application instance that\n generated the message.\n message (dict): The original message.\n\n Returns:\n dict: A copy of the original message with its own ``job_id``.\n\n .. versionadded:: 1.1.0\n\n \"\"\"\n message = deepcopy(message)\n\n message['ancestor_ids'].append(message['job_id'])\n message['job_id'] = str(uuid.uuid4())\n\n return message\n\n\nasync def ignore_provider(app, message):\n \"\"\"Return whether a provider should be ignored.\n\n This function will check for ``INCLUDED_PROVIDERS`` and\n ``EXCLUDED_PROVIDERS`` settings on ``app`` If the former is not\n empty, ``False`` will be returned for any provider in the list and\n ``True`` for all other providers. The provider will only be checked\n against ``EXCLUDED_PROVIDERS`` when the list of included providers\n is empty.\n\n The schema of the incoming message should be validated before using\n this function.\n\n Args:\n app (henson.base.Application): The application instance that\n received the message.\n message (dict): The incoming message. 
It should contain a key\n named ``'provider'``.\n\n Returns:\n dict: The incoming message.\n\n Raises:\n henson.exceptions.Abort: The provider should be ignored.\n KeyError: No provider is included in the message.\n\n \"\"\"\n provider = message['provider']\n\n included = app.settings.get('INCLUDED_PROVIDERS')\n if included:\n # If there is a list of included providers, it is the only thing\n # checked to determine whether or not to ignore the provider.\n if provider not in included:\n # If the provider isn't listed, ignore it.\n raise Abort('provider.ignored', message)\n elif provider in (app.settings.get('EXCLUDED_PROVIDERS') or ()):\n # If the provider is listed, ignore it.\n raise Abort('provider.ignored', message)\n\n return message\n\n\nasync def jsonify(app, message):\n \"\"\"Return an encoded dictionary.\n\n Args:\n app (henson.base.Application): The application.\n message (dict): The message to encode.\n\n Returns:\n bytes: The encoded message.\n\n \"\"\"\n return json.dumps(message).encode('utf-8')\n\n\ndef normalize_isrc(isrc):\n \"\"\"Return an ISRC in a normalized format.\n\n ISRCs can be displayed with dashes to make them easier to read. The\n values themselves, however, do not contain them. Before comparing\n one ISRC to another, any dashes should be stripped, and uppercased.\n\n Args:\n isrc (str): The ISRC value to be transformed.\n\n Returns:\n str: The normalized ISRC.\n\n .. versionadded:: 1.1.0\n\n \"\"\"\n return isrc.replace('-', '').upper()\n\n\ndef normalize_upc(upc):\n \"\"\"Return a UPC to be a normalized format.\n\n UPCs are 12-digit numeric codes. Longer values are generally GTINs\n and should contain 12-digit UPCs padding with leading zeros. These\n leading zeros can be stripped to create the UPC.\n\n UPCs longer than 12 digits that don't start with leading zeros will\n be returned without transformation.\n\n Args:\n upc (str): The UPC or GTIN value to be transformed.\n\n Returns:\n str: The normalized UPC.\n\n .. versionadded:: 1.1.0\n\n \"\"\"\n return upc.lstrip('0').zfill(12)\n\n\nasync def nosjify(app, message):\n \"\"\"Return a decoded dictionary.\n\n Args:\n app (henson.base.Application): The application.\n message: An object with an attribute called ``body`` containing\n the message to decode.\n\n Returns:\n dict: The decoded message.\n\n \"\"\"\n return json.loads(message.body.decode('utf-8'))\n\n\nasync def prepare_incoming_message(app, message):\n \"\"\"Prepare the incoming message with the common message structure.\n\n Messages have the following structure::\n\n {\n 'job_id': ...,\n 'ancestor_ids': ...,\n 'originated_at': ...,\n 'events': [\n {\n 'app': ...,\n 'event_id': ...,\n 'received_at': ...,\n },\n ],\n 'message': ...,\n }\n\n As part of preparing the message, ``job_id``, ``originated_at``, and\n ``events`` will be added if they don't exist. If ``events`` exists\n and contains events, ``message`` will be hoisted to the last event\n for archival purposes. A new event will be added.\n\n Args:\n app (henson.base.Application): The application instance that\n received the message.\n message (dict): The incoming message.\n\n Returns:\n dict: The prepared message.\n\n .. versionchanged:: 1.1.0\n\n The ``ancestor_ids`` key is added to messages.\n\n .. 
versionadded:: 1.0.0\n\n \"\"\"\n now = datetime.utcnow().isoformat()\n\n if not message.get('job_id'):\n message['job_id'] = str(uuid.uuid4())\n\n message.setdefault('ancestor_ids', [])\n\n if not message.get('originated_at'):\n message['originated_at'] = now\n\n if 'events' not in message:\n message['events'] = []\n\n message['events'].append({\n 'app': app.name,\n 'event_id': str(uuid.uuid4()),\n 'received_at': now,\n })\n\n return message\n\n\ndef prepare_outgoing_message(message):\n \"\"\"Return a message with the common message structure.\n\n Args:\n message (dict): The message to prepare.\n\n Returns:\n dict: The prepared message.\n\n .. versionchanged:: 1.0.0\n\n With the changes made to the common message structure, most of\n the functionality has been moved to\n :func:`prepare_incoming_message`. The ``app_name`` and ``event``\n arguments have been removed and the function has been renamed to\n better distinguish itself from :func:`prepare_incoming_message`.\n\n \"\"\"\n message['events'][-1]['updated_at'] = datetime.utcnow().isoformat()\n return message\n\n\nasync def send_error(message, *, producer):\n \"\"\"Send an error message.\n\n ``message`` will be updated with the common message structure and\n sent through the specified producer.\n\n Args:\n message (dict): The message to send.\n producer: The producer through which to send the message.\n\n \"\"\"\n # Preserve the incoming event.\n prepared_message = prepare_outgoing_message(message)\n # TODO: This should be done in a separate step.\n serialized_message = await jsonify(producer.app, prepared_message)\n await producer.error(serialized_message)\n\n\nasync def send_message(\n message, *, producer, exchange_name=None, routing_key=None):\n \"\"\"Send an outgoing message.\n\n ``message`` will be updated with the common message structure and\n sent through the specified producer.\n\n Args:\n message (dict): The message to send.\n producer: The product through which to send the message.\n routing_key (Optional[str]): The routing key to be passed\n through to the producer's ``send`` method. Defaults to\n ``None``.\n\n .. 
versionchanged:: 2.4.0\n\n Support for ``exchange_name`` added.\n \"\"\"\n prepared_message = prepare_outgoing_message(message)\n # TODO: This should be done in a separate step.\n serialized_message = await jsonify(producer.app, prepared_message)\n await producer.send(\n serialized_message,\n exchange_name=exchange_name,\n routing_key=routing_key,\n )\n" }, { "alpha_fraction": 0.6091954112052917, "alphanum_fraction": 0.6091954112052917, "avg_line_length": 21.98113250732422, "blob_id": "c6cb7c649f2c47cdbdb9a53ed85fbdbc5e0b7547", "content_id": "457089e61dd5c639a006de11cfd658a82ba6b0e5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1218, "license_type": "no_license", "max_line_length": 75, "num_lines": 53, "path": "/tests/conftest.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "\"\"\"Test configuration.\"\"\"\n\nimport pytest\n\n\nclass Application:\n \"\"\"A stub application that can be used for testing.\n\n Args:\n **settings: Keyword arguments that will be used as settings.\n \"\"\"\n\n def __init__(self, **settings):\n \"\"\"Initialize the instance.\"\"\"\n self.name = 'testing'\n self.settings = settings\n\n\nclass Producer:\n \"\"\"A stub producer that can be used for testing.\n\n Args:\n app: The application for which this producer produces.\n \"\"\"\n\n def __init__(self, app):\n \"\"\"Initialize the instance.\"\"\"\n self.app = app\n self.sent_error = None\n self.sent_message = None\n\n async def error(self, message):\n \"\"\"Mock send an error message.\"\"\"\n self.sent_error = message\n\n async def send(self, message, *, exchange_name=None, routing_key=None):\n \"\"\"Mock send a message.\"\"\"\n self.sent_message = message\n self.exchange_name = exchange_name\n self.routing_key = routing_key\n\n\[email protected]\ndef test_app():\n \"\"\"Return a test application.\"\"\"\n app = Application()\n return app\n\n\[email protected]\ndef test_producer(test_app):\n \"\"\"Return a test producer.\"\"\"\n return Producer(test_app)\n" }, { "alpha_fraction": 0.6534281373023987, "alphanum_fraction": 0.6574533581733704, "avg_line_length": 27.55555534362793, "blob_id": "881d7d1c0742b55960d0a03b9bc490790dde1d62", "content_id": "75dc42297eff51f460d27b513b978e2ac2afd235", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7453, "license_type": "no_license", "max_line_length": 79, "num_lines": 261, "path": "/tests/test_schema.py", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "\"\"\"Test the schemas.\"\"\"\n\nimport copy\nimport json\nimport os\n\nfrom henson.exceptions import Abort\nimport pytest\nimport voluptuous\n\nfrom pipeline import schema\n\n\ndef load_json(filename):\n \"\"\"Return a parsed JSON file.\n\n Args:\n filename (str): The relative path to the JSON file.\n\n Returns:\n dict: The parsed JSON file.\n \"\"\"\n filepath = os.path.join(\n os.path.dirname(os.path.abspath(__file__)),\n 'data',\n 'schema',\n filename)\n with open(filepath) as f:\n doc = json.loads(f.read())\n return doc\n\n\[email protected]('type_', (\n 'AdvertisementSupportedModel',\n 'DeviceFeeModel',\n 'PayAsYouGoModel',\n 'RightsClaimModel',\n 'SubscriptionModel',\n))\ndef test_commercialmodeltype(type_):\n \"\"\"Test that a type is a CommercialModelType.\"\"\"\n assert schema.CommercialModelType(type_) == type_\n\n\ndef test_empty_document():\n \"\"\"Test that an empty document doesn't validate.\"\"\"\n doc = load_json('empty.json')\n with 
pytest.raises(schema.MultipleInvalid):\n schema.track_bundle(doc)\n\n\[email protected]('message', (\n {},\n {'action': 'takedown'},\n {'amwKey': '123'},\n {'action': 'upsert', 'amwKey': '123'},\n))\ndef test_invalid_takedown(message):\n \"\"\"Test invalid takedown delivery.\"\"\"\n with pytest.raises(Abort):\n schema.validate_schema(schema.takedown, message)\n\n\ndef test_invalid_track_action():\n \"\"\"Test that a bad track action doesn't validate.\"\"\"\n doc = load_json('invalid-track-action.json')\n with pytest.raises(schema.MultipleInvalid):\n schema.track_bundle(doc)\n\n\ndef test_invalid_track_bundle_action():\n \"\"\"Test that a bad track bundle action doesn't validate.\"\"\"\n doc = load_json('invalid-track-bundle-action.json')\n with pytest.raises(schema.MultipleInvalid):\n schema.track_bundle(doc)\n\n\ndef test_iter_errors_multipleinvalid():\n \"\"\"Test that iter_errors handles multiple errors.\"\"\"\n data = {'a': '1'}\n test_schema = voluptuous.Schema({'a': int, 'b': str}, required=True)\n try:\n test_schema(data)\n except schema.MultipleInvalid as e:\n errors = list(schema.iter_errors(e, data))\n\n assert len(errors) == 2\n\n\ndef test_iter_errors_requiredfieldinvalid():\n \"\"\"Test iter_errors with a required field.\"\"\"\n data = {'a': 1}\n test_schema = voluptuous.Schema({'a': int, 'b': str}, required=True)\n try:\n test_schema(data)\n except schema.MultipleInvalid as e:\n error = next(schema.iter_errors(e, data))\n\n assert isinstance(error.error, voluptuous.RequiredFieldInvalid)\n assert error.message == str(error.error)\n assert error.value is None\n\n\ndef test_iter_errors_typeinvalid():\n \"\"\"Test iter_errors with an invalid type.\"\"\"\n data = {'a': '1'}\n test_schema = voluptuous.Schema({'a': int})\n try:\n test_schema(data)\n except schema.MultipleInvalid as e:\n error = next(schema.iter_errors(e, data))\n\n assert isinstance(error.error, schema.TypeInvalid)\n assert error.message.endswith('got str')\n assert error.value == '1'\n\n\ndef test_minimal_takendown():\n \"\"\"Test that a valid minimal takedown passes validation.\"\"\"\n expected = {'action': 'takedown', 'amwKey': '123'}\n actual = schema.validate_schema(schema.takedown, expected)\n assert actual == expected\n\n\ndef test_no_track_bundle_amwkey():\n \"\"\"Test that a missing track bundle amwkey doesn't validate.\"\"\"\n doc = load_json('invalid-track-bundle-amwkey.json')\n with pytest.raises(schema.MultipleInvalid):\n schema.track_bundle(doc)\n\n\ndef test_no_track_bundle_provider():\n \"\"\"Test that a missing track bundle provider doesn't validate.\"\"\"\n doc = load_json('invalid-track-bundle-provider.json')\n with pytest.raises(schema.MultipleInvalid):\n schema.track_bundle(doc)\n\n\ndef test_no_track_bundle_title():\n \"\"\"Test that a missing track bundle title doesn't validate.\"\"\"\n doc = load_json('invalid-track-bundle-title.json')\n with pytest.raises(schema.MultipleInvalid):\n schema.track_bundle(doc)\n\n\ndef test_no_track_isrc():\n \"\"\"Test that a missing track ISRC doesn't validate.\"\"\"\n doc = load_json('invalid-track-isrc.json')\n with pytest.raises(schema.MultipleInvalid):\n schema.track_bundle(doc)\n\n\[email protected]('type_', (\n 'testing',\n 'invalidvalue',\n 'this should fail',\n '123',\n '$5',\n))\ndef test_raises_commercialmodeltypeinvalid(type_):\n \"\"\"Test that invalid commercial models raise CommercialModelTypeInvalid.\"\"\"\n assert type_ not in schema.COMMERCIAL_MODEL_TYPES\n with pytest.raises(schema.CommercialModelTypeInvalid):\n assert 
schema.CommercialModelType(type_) == type_\n\n\[email protected]('type_', (\n ['testing1', 'testing2'],\n ['this should fail'],\n ['fail1', 'fail2', 'fail3'],\n ['invalid'],\n))\ndef test_raises_usetypeinvalid(type_):\n \"\"\"Test that invalid use types raise UseTypeInvalid.\"\"\"\n assert type_ not in schema.USE_TYPES\n with pytest.raises(schema.UseTypeInvalid):\n assert schema.UseType(type_) == type_\n\n\ndef test_takedown():\n \"\"\"Test that a valid takedown passes validation.\"\"\"\n expected = load_json('valid.json')\n expected['action'] = 'takedown'\n actual = schema.validate_schema(schema.takedown, expected)\n assert actual == expected\n\n\ndef test_valid():\n \"\"\"Test a valid document.\"\"\"\n doc = load_json('valid.json')\n assert schema.track_bundle(doc) == doc\n\n\ndef test_upsert():\n \"\"\"Test the upsert schema.\"\"\"\n expected = load_json('valid.json')\n actual = schema.validate_schema(schema.delivery, expected)\n assert actual == expected\n\n\[email protected]('type_', (\n ['ConditionalDownload'],\n ['NonInteractiveStream'],\n ['OnDemandStream'],\n ['PermanentDownload'],\n))\ndef test_usetype(type_):\n \"\"\"Test that a type is a UseType.\"\"\"\n assert schema.UseType(type_) == type_\n\n\[email protected]('schema_, expected', (\n (voluptuous.Schema(str), 'a'),\n (voluptuous.Schema([int]), [1, 2]),\n (\n voluptuous.Schema({'a': int}, extra=voluptuous.ALLOW_EXTRA),\n {'a': 1, 'b': 'c'}\n ),\n))\ndef test_validate_message(schema_, expected):\n \"\"\"Test a message that validates its schema.\"\"\"\n actual = schema.validate_schema(schema_, expected)\n assert actual == expected\n\n\[email protected]('schema_, message', (\n (voluptuous.Schema(str), 1),\n (voluptuous.Schema([int]), [1, 'a']),\n (voluptuous.Schema({'a': int}), {'a': 1, 'b': 2}),\n))\ndef test_validate_message_invalid(schema_, message):\n \"\"\"Test that invalid messages fail to validate.\"\"\"\n with pytest.raises(Abort):\n schema.validate_schema(schema_, message)\n\n\ndef test_valid_offer():\n \"\"\"Test a valid offer.\"\"\"\n doc = load_json('valid-offer.json')\n assert schema.offer(doc) == doc\n\n\[email protected]('message', [\n 'invalid-offer-commercial-model-type.json',\n 'invalid-offer-licensee.json',\n 'invalid-offer-territory-code.json',\n 'invalid-offer-use-type.json',\n 'invalid-offer-valid-from.json',\n 'invalid-offer-valid-to.json',\n])\ndef test_invalid_offer(message):\n \"\"\"Test that invalid offers fail to validate.\"\"\"\n doc = load_json(message)\n with pytest.raises(schema.MultipleInvalid):\n schema.offer(doc)\n\n\ndef test_valid_null_fields():\n \"\"\"Test a valid document with optional null fields.\"\"\"\n doc = load_json('valid-nulled-schema.json')\n assert schema.track_bundle(doc) == doc\n" }, { "alpha_fraction": 0.6363636255264282, "alphanum_fraction": 0.6363636255264282, "avg_line_length": 10, "blob_id": "2667d083036b8bc897e639b25d668729e2e2b2ee", "content_id": "cd18038a6538a87e71a6fc87c1823ffd5113d3ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 88, "license_type": "no_license", "max_line_length": 35, "num_lines": 8, "path": "/docs/api.rst", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "===\nAPI\n===\n\nHere's the public API for pipeline.\n\n.. 
automodule:: pipeline\n :members:\n" }, { "alpha_fraction": 0.5408191680908203, "alphanum_fraction": 0.5408191680908203, "avg_line_length": 34.186275482177734, "blob_id": "a00e9db340f969319e06aa379c06aa0f90fc3502", "content_id": "6e31d91ee76ffc531572d5b4697bb43c7e642592", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 3589, "license_type": "no_license", "max_line_length": 200, "num_lines": 102, "path": "/README.rst", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "========\nPipeline\n========\n\nCommon utilities used by the Ingestion Pipeline.\n\n-----------------------\nDeploying a new version\n-----------------------\n\nTo manually deploy/test a new version:\n\n* Increment the version in setup.py, make sure CodeArtifact\n doesn't already have a repo for that version.\n\n* Make sure `dist` directory is empty, then follow instructions [here](https://github.com/iheartradio/content-platform-documentation/blob/master/private_python_modules/README.md#publishing-with-twine)\n\nWhen a branch is merged to master, a Travis job will\nbuild and deploy the version that's in `setup.py`.\n\n**WARNING:**\nIf you don't delete the version you uploaded for testing, and it conflicts with the final `setup.py` version when the\nbranch is merged, this will result in a conflict error and the build will fail.\n\nIf you redeploy a new version of the same version tag,\nclear pip cache in dependent repos:\n`rm -rf ~/Library/Caches/pip/*`\n\n--------\nDiagrams\n--------\n\nThese are service level flow charts showing incoming rabbitmq exchanges,\nthe actual service itself, outgoing exchange, and potentially outgoing queues.\n\nConfiguration\n=============\n\n+---------------------------------+------------------------------------------+\n| ``DIAGRAMS_SERVICE_NAME`` | Name of the service the diagram is being |\n| | created for. |\n| | |\n+---------------------------------+------------------------------------------+\n| ``DIAGRAMS_INCOMING_EXCHANGES`` | Dictionary with the key indicating the |\n| | incoming exchange, the value |\n| | representing the routing key. |\n+---------------------------------+------------------------------------------+\n| ``DIAGRAMS_OUTGOING_EXCHANGE`` | Outgoing exchange the message is sent to |\n| | after being processed by the service. |\n+---------------------------------+------------------------------------------+\n| ``DIAGRAMS_OUTGOING_QUEUES`` | Dictionary of outgoing queues, with the |\n| | key indicating the outgoing queue, the |\n| | value representing the routing key. |\n+---------------------------------+------------------------------------------+\n\nAn example configuration\n========================\n\nLet's say we want to generate a diagram of the ``persist`` service:\n\n.. code-block:: python\n\n DIAGRAMS_SERVICE_NAME: str = 'persist'\n DIAGRAMS_INCOMING_EXCHANGES: dict = {\n 'coalesced': 'nonfetchable',\n 'fetched': 'feched',\n 'objectified': 'nonfetchable',\n }\n DIAGRAMS_OUTGOING_EXCHANGE = 'persisted'\n DIAGRAMS_OUTGOING_QUEUES: dict = {\n 'am-search': '#',\n 'leroy': 'upsert.#',\n 'logstash': '#',\n 'pre-audit': '#.trackbundle',\n 'reconcile': '#.trackbundle',\n 'takedown': 'takedown.#',\n }\n\nGenerating a new diagram\n========================\n\nTo generate the diagram, you would just need to have a\nfile like the following in your project's root directory:\n\n.. 
code-block:: python\n\n \"\"\"Generates Bindings Diagram.\"\"\"\n\n from publish import settings\n from pipeline.diagrams import generate_bindings_diagram\n\n generate_bindings_diagram(settings)\n\nThen run the following from the root folder:\n\n.. code-block:: bash\n\n $ python generate_bindings_diagram.py\n\nThis would generate a diagram like so:\n\n.. image:: docs/images/bindings.png\n" }, { "alpha_fraction": 0.7606298923492432, "alphanum_fraction": 0.8157480359077454, "avg_line_length": 62.5, "blob_id": "ec0beb7db33f7d1d73ce5cca0e5f34e4403113b3", "content_id": "b36ae4b1614fd4ecf433071256950419b1b54453", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 635, "license_type": "no_license", "max_line_length": 159, "num_lines": 10, "path": "/scripts/deploy.sh", "repo_name": "iheartradio/pipeline", "src_encoding": "UTF-8", "text": "#!/bin/sh\nexport TWINE_USERNAME=aws\nexport TWINE_PASSWORD=`aws codeartifact get-authorization-token --domain content-platform --domain-owner 827541288795 --query authorizationToken --output text`\nexport TWINE_REPOSITORY_URL=https://content-platform-827541288795.d.codeartifact.us-east-1.amazonaws.com/pypi/content-platform/\n# upgrading pip to resolve dependency issue\n# https://travis-ci.community/t/cant-deploy-to-pypi-anymore-pkg-resources-contextualversionconflict-importlib-metadata-0-18/10494/26\npip install --upgrade pip\npip install twine\npython setup.py sdist bdist_wheel\ntwine upload --verbose --repository pipeline-ihr dist/*\n" } ]
16
lllate/DECISION
https://github.com/lllate/DECISION
996f3ca274724c7b2e62991e8940b1c776792b64
73a3fb3c34e7854ae974b2ba8a5debc940e86e16
3f963af3c0c59e9ac9fb930b0d29a0cf986555c6
refs/heads/main
2023-05-11T00:38:25.942601
2021-05-26T06:25:37
2021-05-26T06:25:37
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6385372877120972, "alphanum_fraction": 0.6413502097129822, "avg_line_length": 27.479999542236328, "blob_id": "6263a99f47e8f43d2d13d491951d6d5529de71f9", "content_id": "8198edeefeff7b7339aadd0ad6f8c4abe7ef771a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 711, "license_type": "no_license", "max_line_length": 68, "num_lines": 25, "path": "/gen_list.py", "repo_name": "lllate/DECISION", "src_encoding": "UTF-8", "text": "import os \n\ndataset = 'office-home'\n\nif dataset == 'office':\n\tdomains = ['amazon', 'dslr', 'webcam']\nelif dataset == 'office-caltech':\n\tdomains = ['amazon', 'dslr', 'webcam', 'caltech']\nelif dataset == 'office-home':\n\tdomains = ['Art', 'Clipart', 'Product', 'Real_World']\nelse:\n\tprint('No such dataset exists!')\n\nfor domain in domains:\n\tlog = open(dataset+'/'+domain+'_list.txt','w')\n\tdirectory = os.path.join(dataset, os.path.join(domain,'images'))\n\tclasses = [x[0] for x in os.walk(directory)]\n\tclasses = classes[1:]\n\tclasses.sort()\n\tfor idx,f in enumerate(classes):\n\t\tfiles = os.listdir(f)\n\t\tfor file in files:\n\t\t\ts = os.path.abspath(os.path.join(f,file)) + ' ' + str(idx) + '\\n'\n\t\t\tlog.write(s)\n\tlog.close()" }, { "alpha_fraction": 0.5658896565437317, "alphanum_fraction": 0.580637514591217, "avg_line_length": 40.93266677856445, "blob_id": "84e417248b933532b09e64fd6e188f7296fac98f", "content_id": "dd0282c0077d2e100a9556d7b3012dc90c09a64c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16816, "license_type": "no_license", "max_line_length": 171, "num_lines": 401, "path": "/adapt_multi.py", "repo_name": "lllate/DECISION", "src_encoding": "UTF-8", "text": "import argparse\nimport os, sys\nimport os.path as osp\nimport torchvision\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.optim as optim\nfrom torchvision import transforms\nimport network, loss\nfrom torch.utils.data import DataLoader\nfrom data_list import ImageList, ImageList_idx\nimport random, pdb, math, copy\nfrom tqdm import tqdm\nfrom scipy.spatial.distance import cdist\nfrom sklearn.metrics import confusion_matrix\n\ndef op_copy(optimizer):\n for param_group in optimizer.param_groups:\n param_group['lr0'] = param_group['lr']\n return optimizer\n\ndef lr_scheduler(optimizer, iter_num, max_iter, gamma=10, power=0.75):\n decay = (1 + gamma * iter_num / max_iter) ** (-power)\n for param_group in optimizer.param_groups:\n param_group['lr'] = param_group['lr0'] * decay\n param_group['weight_decay'] = 1e-3\n param_group['momentum'] = 0.9\n param_group['nesterov'] = True\n return optimizer\n\ndef image_train(resize_size=256, crop_size=224, alexnet=False):\n if not alexnet:\n normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n else:\n normalize = Normalize(meanfile='./ilsvrc_2012_mean.npy')\n return transforms.Compose([\n transforms.Resize((resize_size, resize_size)),\n transforms.RandomCrop(crop_size),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor(),\n normalize\n ])\n\ndef image_test(resize_size=256, crop_size=224, alexnet=False):\n if not alexnet:\n normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n else:\n normalize = Normalize(meanfile='./ilsvrc_2012_mean.npy')\n return transforms.Compose([\n transforms.Resize((resize_size, resize_size)),\n transforms.CenterCrop(crop_size),\n transforms.ToTensor(),\n normalize\n ])\n\ndef 
data_load(args): \n ## prepare data\n dsets = {}\n dset_loaders = {}\n train_bs = args.batch_size\n txt_tar = open(args.t_dset_path).readlines()\n txt_test = open(args.test_dset_path).readlines()\n\n dsets[\"target\"] = ImageList_idx(txt_tar, transform=image_train())\n dset_loaders[\"target\"] = DataLoader(dsets[\"target\"], batch_size=train_bs, shuffle=True, num_workers=args.worker, drop_last=False)\n dsets['target_'] = ImageList_idx(txt_tar, transform=image_train())\n dset_loaders['target_'] = DataLoader(dsets['target_'], batch_size=train_bs*3, shuffle=False, num_workers=args.worker, drop_last=False)\n dsets[\"test\"] = ImageList_idx(txt_test, transform=image_test())\n dset_loaders[\"test\"] = DataLoader(dsets[\"test\"], batch_size=train_bs*3, shuffle=False, num_workers=args.worker, drop_last=False)\n\n return dset_loaders\n\ndef train_target(args):\n dset_loaders = data_load(args)\n ## set base network\n if args.net[0:3] == 'res':\n netF_list = [network.ResBase(res_name=args.net).cuda() for i in range(len(args.src))]\n elif args.net[0:3] == 'vgg':\n netF_list = [network.VGGBase(vgg_name=args.net).cuda() for i in range(len(args.src))] \n\n w = 2*torch.rand((len(args.src),))-1\n print(w)\n\n netB_list = [network.feat_bottleneck(type=args.classifier, feature_dim=netF_list[i].in_features, bottleneck_dim=args.bottleneck).cuda() for i in range(len(args.src))] \n netC_list = [network.feat_classifier(type=args.layer, class_num = args.class_num, bottleneck_dim=args.bottleneck).cuda() for i in range(len(args.src))]\n netG_list = [network.scalar(w[i]).cuda() for i in range(len(args.src))]\n\n param_group = []\n for i in range(len(args.src)):\n modelpath = args.output_dir_src[i] + '/source_F.pt'\n print(modelpath)\n netF_list[i].load_state_dict(torch.load(modelpath))\n netF_list[i].eval()\n for k, v in netF_list[i].named_parameters():\n param_group += [{'params':v, 'lr':args.lr * args.lr_decay1}]\n\n modelpath = args.output_dir_src[i] + '/source_B.pt'\n print(modelpath)\n netB_list[i].load_state_dict(torch.load(modelpath))\n netB_list[i].eval()\n for k, v in netB_list[i].named_parameters():\n param_group += [{'params':v, 'lr':args.lr * args.lr_decay2}]\n\n modelpath = args.output_dir_src[i] + '/source_C.pt'\n print(modelpath)\n netC_list[i].load_state_dict(torch.load(modelpath))\n netC_list[i].eval()\n for k, v in netC_list[i].named_parameters():\n v.requires_grad = False\n\n for k, v in netG_list[i].named_parameters():\n param_group += [{'params':v, 'lr':args.lr}]\n \n optimizer = optim.SGD(param_group)\n optimizer = op_copy(optimizer)\n\n max_iter = args.max_epoch * len(dset_loaders[\"target\"])\n interval_iter = max_iter // args.interval\n iter_num = 0\n\n c = 0\n\n while iter_num < max_iter:\n try:\n inputs_test, _, tar_idx = iter_test.next()\n except:\n iter_test = iter(dset_loaders[\"target\"])\n inputs_test, _, tar_idx = iter_test.next()\n\n if inputs_test.size(0) == 1:\n continue\n\n if iter_num % interval_iter == 0 and args.cls_par > 0:\n initc = []\n all_feas = []\n for i in range(len(args.src)):\n netF_list[i].eval()\n netB_list[i].eval()\n temp1, temp2 = obtain_label(dset_loaders['target_'], netF_list[i], netB_list[i], netC_list[i], args)\n temp1 = torch.from_numpy(temp1).cuda()\n temp2 = torch.from_numpy(temp2).cuda()\n initc.append(temp1)\n all_feas.append(temp2)\n netF_list[i].train()\n netB_list[i].train()\n\n inputs_test = inputs_test.cuda()\n\n iter_num += 1\n lr_scheduler(optimizer, iter_num=iter_num, max_iter=max_iter)\n\n outputs_all = torch.zeros(len(args.src), 
inputs_test.shape[0], args.class_num)\n weights_all = torch.ones(inputs_test.shape[0], len(args.src))\n outputs_all_w = torch.zeros(inputs_test.shape[0], args.class_num)\n init_ent = torch.zeros(1,len(args.src))\n\n for i in range(len(args.src)):\n features_test = netB_list[i](netF_list[i](inputs_test))\n outputs_test = netC_list[i](features_test)\n softmax_ = nn.Softmax(dim=1)(outputs_test)\n ent_loss = torch.mean(loss.Entropy(softmax_))\n init_ent[:,i] = ent_loss\n weights_test = netG_list[i](features_test)\n outputs_all[i] = outputs_test\n weights_all[:, i] = weights_test.squeeze()\n\n z = torch.sum(weights_all, dim=1)\n z = z + 1e-16\n\n weights_all = torch.transpose(torch.transpose(weights_all,0,1)/z,0,1)\n outputs_all = torch.transpose(outputs_all, 0, 1)\n\n z_ = torch.sum(weights_all, dim=0)\n \n z_2 = torch.sum(weights_all)\n z_ = z_/z_2\n \n for i in range(inputs_test.shape[0]):\n outputs_all_w[i] = torch.matmul(torch.transpose(outputs_all[i],0,1), weights_all[i])\n \n if args.cls_par > 0:\n initc_ = torch.zeros(initc[0].size()).cuda()\n temp = all_feas[0]\n all_feas_ = torch.zeros(temp[tar_idx, :].size()).cuda()\n for i in range(len(args.src)):\n initc_ = initc_ + z_[i] * initc[i].float()\n src_fea = all_feas[i]\n all_feas_ = all_feas_ + z_[i] * src_fea[tar_idx, :]\n dd = torch.cdist(all_feas_.float(), initc_.float(), p=2)\n pred_label = dd.argmin(dim=1)\n pred_label = pred_label.int()\n pred = pred_label.long()\n classifier_loss = args.cls_par * nn.CrossEntropyLoss()(outputs_all_w, pred.cpu())\n else:\n classifier_loss = torch.tensor(0.0)\n\n if args.ent:\n softmax_out = nn.Softmax(dim=1)(outputs_all_w)\n entropy_loss = torch.mean(loss.Entropy(softmax_out))\n if args.gent:\n msoftmax = softmax_out.mean(dim=0)\n entropy_loss -= torch.sum(-msoftmax * torch.log(msoftmax + 1e-5))\n\n im_loss = entropy_loss * args.ent_par\n classifier_loss += im_loss\n\n optimizer.zero_grad()\n classifier_loss.backward()\n optimizer.step()\n\n if iter_num % interval_iter == 0 or iter_num == max_iter:\n for i in range(len(args.src)):\n netF_list[i].eval()\n netB_list[i].eval()\n acc, _ = cal_acc_multi(dset_loaders['test'], netF_list, netB_list, netC_list, netG_list, args)\n log_str = 'Iter:{}/{}; Accuracy = {:.2f}%'.format(iter_num, max_iter, acc)\n print(log_str+'\\n')\n for i in range(len(args.src)):\n torch.save(netF_list[i].state_dict(), osp.join(args.output_dir, \"target_F_\" + str(i) + \"_\" + args.savename + \".pt\"))\n torch.save(netB_list[i].state_dict(), osp.join(args.output_dir, \"target_B_\" + str(i) + \"_\" + args.savename + \".pt\"))\n torch.save(netC_list[i].state_dict(), osp.join(args.output_dir, \"target_C_\" + str(i) + \"_\" + args.savename + \".pt\"))\n torch.save(netG_list[i].state_dict(), osp.join(args.output_dir, \"target_G_\" + str(i) + \"_\" + args.savename + \".pt\"))\n\ndef obtain_label(loader, netF, netB, netC, args):\n start_test = True\n with torch.no_grad():\n iter_test = iter(loader)\n for _ in range(len(loader)):\n data = iter_test.next()\n inputs = data[0]\n labels = data[1]\n inputs = inputs.cuda()\n feas = netB(netF(inputs.float()))\n outputs = netC(feas)\n if start_test:\n all_fea = feas.float().cpu()\n all_output = outputs.float().cpu()\n all_label = labels.float()\n start_test = False\n else:\n all_fea = torch.cat((all_fea, feas.float().cpu()), 0)\n all_output = torch.cat((all_output, outputs.float().cpu()), 0)\n all_label = torch.cat((all_label, labels.float()), 0)\n all_output = nn.Softmax(dim=1)(all_output)\n _, predict = torch.max(all_output, 1)\n accuracy = 
torch.sum(torch.squeeze(predict).float() == all_label).item() / float(all_label.size()[0])\n \n all_fea = torch.cat((all_fea, torch.ones(all_fea.size(0), 1)), 1)\n all_fea = (all_fea.t() / torch.norm(all_fea, p=2, dim=1)).t()\n all_fea = all_fea.float().cpu().numpy()\n\n K = all_output.size(1)\n aff = all_output.float().cpu().numpy()\n initc = aff.transpose().dot(all_fea)\n initc = initc / (1e-8 + aff.sum(axis=0)[:,None])\n\n dd = cdist(all_fea, initc, 'cosine')\n pred_label = dd.argmin(axis=1)\n acc = np.sum(pred_label == all_label.float().numpy()) / len(all_fea)\n\n for round in range(1):\n aff = np.eye(K)[pred_label]\n initc = aff.transpose().dot(all_fea)\n initc = initc / (1e-8 + aff.sum(axis=0)[:,None])\n dd = cdist(all_fea, initc, 'cosine')\n pred_label = dd.argmin(axis=1)\n acc = np.sum(pred_label == all_label.float().numpy()) / len(all_fea)\n\n log_str = 'Accuracy = {:.2f}% -> {:.2f}%'.format(accuracy*100, acc*100)\n print(log_str+'\\n')\n #return pred_label.astype('int')\n return initc,all_fea\n\n\ndef cal_acc_multi(loader, netF_list, netB_list, netC_list, netG_list, args):\n start_test = True\n with torch.no_grad():\n iter_test = iter(loader)\n for _ in range(len(loader)):\n data = iter_test.next()\n inputs = data[0]\n labels = data[1]\n inputs = inputs.cuda()\n outputs_all = torch.zeros(len(args.src), inputs.shape[0], args.class_num)\n weights_all = torch.ones(inputs.shape[0], len(args.src))\n outputs_all_w = torch.zeros(inputs.shape[0], args.class_num)\n \n for i in range(len(args.src)):\n features = netB_list[i](netF_list[i](inputs))\n outputs = netC_list[i](features)\n weights = netG_list[i](features)\n outputs_all[i] = outputs\n weights_all[:, i] = weights.squeeze()\n\n z = torch.sum(weights_all, dim=1)\n z = z + 1e-16\n\n weights_all = torch.transpose(torch.transpose(weights_all,0,1)/z,0,1)\n print(weights_all.mean(dim=0))\n outputs_all = torch.transpose(outputs_all, 0, 1)\n\n for i in range(inputs.shape[0]):\n outputs_all_w[i] = torch.matmul(torch.transpose(outputs_all[i],0,1), weights_all[i])\n\n if start_test:\n all_output = outputs_all_w.float().cpu()\n all_label = labels.float()\n start_test = False\n else:\n all_output = torch.cat((all_output, outputs_all_w.float().cpu()), 0)\n all_label = torch.cat((all_label, labels.float()), 0)\n _, predict = torch.max(all_output, 1)\n accuracy = torch.sum(torch.squeeze(predict).float() == all_label).item() / float(all_label.size()[0])\n mean_ent = torch.mean(loss.Entropy(nn.Softmax(dim=1)(all_output))).cpu().data.item()\n return accuracy*100, mean_ent\n\ndef print_args(args):\n s = \"==========================================\\n\"\n for arg, content in args.__dict__.items():\n s += \"{}:{}\\n\".format(arg, content)\n return s\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(description='SHOT')\n parser.add_argument('--gpu_id', type=str, nargs='?', default='0', help=\"device id to run\")\n parser.add_argument('--t', type=int, default=0, help=\"target\") ## Choose which domain to set as target {0 to len(names)-1}\n parser.add_argument('--max_epoch', type=int, default=15, help=\"max iterations\")\n parser.add_argument('--interval', type=int, default=15)\n parser.add_argument('--batch_size', type=int, default=32, help=\"batch_size\")\n parser.add_argument('--worker', type=int, default=4, help=\"number of workers\")\n parser.add_argument('--dset', type=str, default='office-caltech', choices=['office', 'office-home', 'office-caltech'])\n parser.add_argument('--lr', type=float, default=1*1e-2, help=\"learning rate\")\n 
parser.add_argument('--net', type=str, default='resnet50', help=\"vgg16, resnet50, res101\")\n parser.add_argument('--seed', type=int, default=2021, help=\"random seed\")\n \n parser.add_argument('--gent', type=bool, default=True)\n parser.add_argument('--ent', type=bool, default=True)\n parser.add_argument('--threshold', type=int, default=0)\n parser.add_argument('--cls_par', type=float, default=0.3)\n parser.add_argument('--ent_par', type=float, default=1.0)\n parser.add_argument('--lr_decay1', type=float, default=0.1)\n parser.add_argument('--lr_decay2', type=float, default=1.0)\n\n parser.add_argument('--bottleneck', type=int, default=256)\n parser.add_argument('--epsilon', type=float, default=1e-5)\n parser.add_argument('--layer', type=str, default=\"wn\", choices=[\"linear\", \"wn\"])\n parser.add_argument('--classifier', type=str, default=\"bn\", choices=[\"ori\", \"bn\"])\n parser.add_argument('--distance', type=str, default='cosine', choices=[\"euclidean\", \"cosine\"]) \n parser.add_argument('--output', type=str, default='ckps/adapt_ours')\n parser.add_argument('--output_src', type=str, default='ckps/source')\n args = parser.parse_args()\n \n if args.dset == 'office-home':\n names = ['Art', 'Clipart', 'Product', 'Real_World']\n args.class_num = 65\n if args.dset == 'office':\n names = ['amazon', 'dslr' , 'webcam']\n args.class_num = 31\n if args.dset == 'office-caltech':\n names = ['amazon', 'caltech', 'dslr', 'webcam']\n args.class_num = 10\n\n args.src = []\n for i in range(len(names)):\n if i == args.t:\n continue\n else:\n args.src.append(names[i])\n\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu_id\n SEED = args.seed\n torch.manual_seed(SEED)\n torch.cuda.manual_seed(SEED)\n np.random.seed(SEED)\n random.seed(SEED)\n\n for i in range(len(names)):\n if i != args.t:\n continue\n folder = './data/'\n args.t_dset_path = folder + args.dset + '/' + names[args.t] + '_list.txt'\n args.test_dset_path = folder + args.dset + '/' + names[args.t] + '_list.txt'\n print(args.t_dset_path)\n\n args.output_dir_src = []\n for i in range(len(args.src)):\n args.output_dir_src.append(osp.join(args.output_src, args.dset, args.src[i][0].upper()))\n print(args.output_dir_src)\n args.output_dir = osp.join(args.output, args.dset, names[args.t][0].upper())\n\n if not osp.exists(args.output_dir):\n os.system('mkdir -p ' + args.output_dir)\n if not osp.exists(args.output_dir):\n os.mkdir(args.output_dir)\n\n args.savename = 'par_' + str(args.cls_par)\n\n train_target(args)\n\n" }, { "alpha_fraction": 0.5939298868179321, "alphanum_fraction": 0.6082081198692322, "avg_line_length": 39.66261291503906, "blob_id": "b873f060060641c712c34a10f2cb551d0a20afbe", "content_id": "876075fc39922c1b18596d2b965e1772ef9e525f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13377, "license_type": "no_license", "max_line_length": 171, "num_lines": 329, "path": "/distill.py", "repo_name": "lllate/DECISION", "src_encoding": "UTF-8", "text": "import argparse\nimport os, sys\nimport os.path as osp\nimport torchvision\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.optim as optim\nfrom torchvision import transforms\nimport network, loss\nfrom torch.utils.data import DataLoader\nfrom data_list import ImageList, ImageList_idx\nimport random, pdb, math, copy\nfrom tqdm import tqdm\nfrom scipy.spatial.distance import cdist\nfrom sklearn.metrics import confusion_matrix\nfrom loss import softCrossEntropy\n\n\ndef op_copy(optimizer):\n for param_group 
in optimizer.param_groups:\n param_group['lr0'] = param_group['lr']\n return optimizer\n\ndef lr_scheduler(optimizer, iter_num, max_iter, gamma=10, power=0.75):\n decay = (1 + gamma * iter_num / max_iter) ** (-power)\n for param_group in optimizer.param_groups:\n param_group['lr'] = param_group['lr0'] * decay\n param_group['weight_decay'] = 1e-3\n param_group['momentum'] = 0.9\n param_group['nesterov'] = True\n return optimizer\n\ndef get_labels(inputs, netF_list, netB_list, netC_list, netG_list):\n with torch.no_grad():\n inputs = inputs.cuda()\n outputs_all = torch.zeros(len(args.src), inputs.shape[0], args.class_num)\n weights_all = torch.ones(inputs.shape[0], len(args.src))\n outputs_all_w = torch.zeros(inputs.shape[0], args.class_num)\n\n for i in range(len(args.src)):\n \tfeatures = netB_list[i](netF_list[i](inputs))\n \toutputs = netC_list[i](features)\n \tweights = netG_list[i](features)\n \toutputs_all[i] = outputs\n \tweights_all[:, i] = weights.squeeze()\n\n z = torch.sum(weights_all, dim=1)\n z = z + 1e-16\n\n weights_all = torch.transpose(torch.transpose(weights_all,0,1)/z,0,1)\n # print(weights_all.mean(dim=0))\n outputs_all = torch.transpose(outputs_all, 0, 1)\n for i in range(inputs.shape[0]):\n outputs_all_w[i] = torch.matmul(torch.transpose(outputs_all[i],0,1), weights_all[i])\n\n all_output = outputs_all_w.float().cpu()\n \n _, predict = torch.max(all_output, 1)\n\n return predict, all_output\n\ndef data_load(args): \n ## prepare data\n dsets = {}\n dset_loaders = {}\n train_bs = args.batch_size\n txt_tar = open(args.t_dset_path).readlines()\n txt_test = open(args.test_dset_path).readlines()\n\n dsets[\"target\"] = ImageList_idx(txt_tar, transform=image_train())\n dset_loaders[\"target\"] = DataLoader(dsets[\"target\"], batch_size=train_bs, shuffle=True, num_workers=args.worker, drop_last=False)\n dsets['target_'] = ImageList_idx(txt_tar, transform=image_train())\n dset_loaders['target_'] = DataLoader(dsets['target_'], batch_size=train_bs*3, shuffle=False, num_workers=args.worker, drop_last=False)\n dsets[\"test\"] = ImageList_idx(txt_test, transform=image_test())\n dset_loaders[\"test\"] = DataLoader(dsets[\"test\"], batch_size=train_bs*3, shuffle=False, num_workers=args.worker, drop_last=False)\n return dset_loaders\n\ndef image_train(resize_size=256, crop_size=224, alexnet=False):\n if not alexnet:\n normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n else:\n normalize = Normalize(meanfile='./ilsvrc_2012_mean.npy')\n return transforms.Compose([\n transforms.Resize((resize_size, resize_size)),\n transforms.RandomCrop(crop_size),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor(),\n normalize\n ])\n\ndef image_test(resize_size=256, crop_size=224, alexnet=False):\n if not alexnet:\n normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n else:\n normalize = Normalize(meanfile='./ilsvrc_2012_mean.npy')\n return transforms.Compose([\n transforms.Resize((resize_size, resize_size)),\n transforms.CenterCrop(crop_size),\n transforms.ToTensor(),\n normalize\n ])\n\ndef cal_acc(loader, netF, netB, netC, flag=False):\n start_test = True\n with torch.no_grad():\n iter_test = iter(loader)\n for i in range(len(loader)):\n data = iter_test.next()\n inputs = data[0]\n labels = data[1]\n inputs = inputs.cuda()\n outputs = netC(netB(netF(inputs)))\n if start_test:\n all_output = outputs.float().cpu()\n all_label = labels.float()\n start_test = False\n else:\n all_output = torch.cat((all_output, 
outputs.float().cpu()), 0)\n all_label = torch.cat((all_label, labels.float()), 0)\n _, predict = torch.max(all_output, 1)\n accuracy = torch.sum(torch.squeeze(predict).float() == all_label).item() / float(all_label.size()[0])\n mean_ent = torch.mean(loss.Entropy(nn.Softmax(dim=1)(all_output))).cpu().data.item()\n\n if flag:\n matrix = confusion_matrix(all_label, torch.squeeze(predict).float())\n acc = matrix.diagonal()/matrix.sum(axis=1) * 100\n aacc = acc.mean()\n aa = [str(np.round(i, 2)) for i in acc]\n acc = ' '.join(aa)\n return aacc, acc\n else:\n return accuracy*100, mean_ent\n\ndef train_distill(args):\n dset_loaders = data_load(args)\n # load sources\n if args.net[0:3] == 'res':\n netF_list = [network.ResBase(res_name=args.net).cuda() for i in range(len(args.src))]\n netF = network.ResBase(res_name=args.net).cuda()\n elif args.net[0:3] == 'vgg':\n netF_list = [network.VGGBase(vgg_name=args.net).cuda() for i in range(len(args.src))]\n netF = network.VGGBase(res_name=args.net).cuda() \n\n netB_list = [network.feat_bottleneck(type=args.classifier, feature_dim=netF_list[i].in_features, bottleneck_dim=args.bottleneck).cuda() for i in range(len(args.src))] \n netC_list = [network.feat_classifier(type=args.layer, class_num = args.class_num, bottleneck_dim=args.bottleneck).cuda() for i in range(len(args.src))]\n netG_list = [network.scalar(1).cuda() for i in range(len(args.src))]\n\n for i in range(len(args.src)):\n modelpath = args.output_dir_src + '/target_F_'+str(i)+'_par_0.3.pt'\n netF_list[i].load_state_dict(torch.load(modelpath))\n netF_list[i].eval()\n netF_list[i].cuda()\n for k, v in netF_list[i].named_parameters():\n v.requires_grad = False\n\n modelpath = args.output_dir_src + '/target_B_'+str(i)+'_par_0.3.pt'\n netB_list[i].load_state_dict(torch.load(modelpath))\n netB_list[i].eval()\n netB_list[i].cuda()\n for k, v in netB_list[i].named_parameters():\n v.requires_grad = False\n\n modelpath = args.output_dir_src + '/target_C_'+str(i)+'_par_0.3.pt'\n netC_list[i].load_state_dict(torch.load(modelpath))\n netC_list[i].eval()\n netC_list[i].cuda()\n for k, v in netC_list[i].named_parameters():\n v.requires_grad = False\n\n modelpath = args.output_dir_src + '/target_G_'+str(i)+'_par_0.3.pt'\n netG_list[i].load_state_dict(torch.load(modelpath))\n netG_list[i].eval()\n netG_list[i].cuda()\n for k, v in netG_list[i].named_parameters():\n v.requires_grad = False\n\n # create student\n netB = network.feat_bottleneck(type=args.classifier, feature_dim=netF.in_features, bottleneck_dim=args.bottleneck).cuda()\n netC = network.feat_classifier(type=args.layer, class_num = args.class_num, bottleneck_dim=args.bottleneck).cuda()\n\n param_group = []\n learning_rate = args.lr\n for k, v in netF.named_parameters():\n param_group += [{'params': v, 'lr': learning_rate}]\n for k, v in netB.named_parameters():\n param_group += [{'params': v, 'lr': learning_rate}]\n for k, v in netC.named_parameters():\n param_group += [{'params': v, 'lr': learning_rate}] \n optimizer = optim.SGD(param_group)\n optimizer = op_copy(optimizer)\n\n acc_init = 0\n max_iter = args.max_epoch * len(dset_loaders[\"target\"])\n interval_iter = max_iter // 10\n iter_num = 0\n\n netF.train()\n netB.train()\n netC.train()\n\n while iter_num < max_iter:\n try:\n inputs = iter_source.next()\n except:\n iter_source = iter(dset_loaders[\"target\"])\n inputs = iter_source.next()\n\n inputs = inputs[0]\n if inputs.size(0) == 1:\n continue\n\n iter_num += 1\n lr_scheduler(optimizer, iter_num=iter_num, max_iter=max_iter)\n\n labels, 
logits = get_labels(inputs, netF_list, netB_list, netC_list, netG_list)\n\n inputs, labels, logits = inputs.cuda(), labels.cuda(), logits.cuda()\n labels, logits = labels.detach(), logits.detach()\n outputs = netC(netB(netF(inputs)))\n classifier_loss = nn.CrossEntropyLoss()(outputs, labels) \n \n optimizer.zero_grad()\n classifier_loss.backward()\n optimizer.step()\n\n if iter_num % interval_iter == 0 or iter_num == max_iter:\n netF.eval()\n netB.eval()\n netC.eval()\n acc_s_te, _ = cal_acc(dset_loaders['test'], netF, netB, netC, False)\n log_str = 'Task: {}, Iter:{}/{}; Accuracy = {:.2f}%'.format(args.tgt, iter_num, max_iter, acc_s_te)\n # args.out_file.write(log_str + '\\n')\n # args.out_file.flush()\n print(log_str+'\\n')\n\n if acc_s_te >= acc_init:\n acc_init = acc_s_te\n best_netF = netF.state_dict()\n best_netB = netB.state_dict()\n best_netC = netC.state_dict()\n\n netF.train()\n netB.train()\n netC.train()\n \n torch.save(best_netF, osp.join(args.output_dir_src, \"source_F.pt\"))\n torch.save(best_netB, osp.join(args.output_dir_src, \"source_B.pt\"))\n torch.save(best_netC, osp.join(args.output_dir_src, \"source_C.pt\"))\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(description='SHOT')\n parser.add_argument('--gpu_id', type=str, nargs='?', default='0', help=\"device id to run\")\n parser.add_argument('--t', type=int, default=0, help=\"target\") ## Choose which domain to set as target {0 to len(names)-1}\n parser.add_argument('--max_epoch', type=int, default=15, help=\"max iterations\")\n parser.add_argument('--interval', type=int, default=15)\n parser.add_argument('--batch_size', type=int, default=32, help=\"batch_size\")\n parser.add_argument('--worker', type=int, default=4, help=\"number of workers\")\n parser.add_argument('--dset', type=str, default='office-home', choices=['office', 'office-home', 'office-caltech'])\n parser.add_argument('--lr', type=float, default=1*1e-2, help=\"learning rate\")\n parser.add_argument('--net', type=str, default='resnet50', help=\"vgg16, resnet50, res101\")\n parser.add_argument('--temp', type=float, default=1.0)\n parser.add_argument('--seed', type=int, default=2020, help=\"random seed\")\n \n parser.add_argument('--gent', type=bool, default=True)\n parser.add_argument('--ent', type=bool, default=True)\n parser.add_argument('--threshold', type=int, default=0)\n parser.add_argument('--cls_par', type=float, default=0.3)\n parser.add_argument('--ent_par', type=float, default=1.0)\n parser.add_argument('--lr_decay1', type=float, default=0.1)\n parser.add_argument('--lr_decay2', type=float, default=1.0)\n\n parser.add_argument('--bottleneck', type=int, default=256)\n parser.add_argument('--epsilon', type=float, default=1e-5)\n parser.add_argument('--layer', type=str, default=\"wn\", choices=[\"linear\", \"wn\"])\n parser.add_argument('--classifier', type=str, default=\"bn\", choices=[\"ori\", \"bn\"])\n parser.add_argument('--distance', type=str, default='cosine', choices=[\"euclidean\", \"cosine\"]) \n parser.add_argument('--output', type=str, default='san')\n parser.add_argument('--output_src', type=str, default='ckps/adapt')\n parser.add_argument('--issave', type=bool, default=True)\n args = parser.parse_args()\n \n if args.dset == 'office-home':\n names = ['Art', 'Clipart', 'Product', 'Real_World']\n args.class_num = 65\n if args.dset == 'office':\n names = ['amazon', 'dslr' , 'webcam']\n args.class_num = 31\n if args.dset == 'office-caltech':\n names = ['amazon', 'caltech', 'dslr', 'webcam']\n args.class_num = 10\n\n 
args.src = []\n for i in range(len(names)):\n if i == args.t:\n continue\n else:\n args.src.append(names[i])\n args.tgt = names[args.t]\n\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu_id\n SEED = args.seed\n torch.manual_seed(SEED)\n torch.cuda.manual_seed(SEED)\n np.random.seed(SEED)\n random.seed(SEED)\n\n for i in range(len(names)):\n if i != args.t:\n continue\n folder = 'data/'\n args.t_dset_path = folder + args.dset + '/' + names[args.t] + '_list.txt'\n args.test_dset_path = folder + args.dset + '/' + names[args.t] + '_list.txt'\n\n \n args.output_dir_src = osp.join(args.output_src, args.dset, names[args.t][0].upper())\n print(args.output_dir_src)\n args.output_dir = osp.join(args.output, 'adapt_distill', args.dset, names[args.t][0].upper())\n\n if not osp.exists(args.output_dir):\n os.system('mkdir -p ' + args.output_dir)\n if not osp.exists(args.output_dir):\n os.mkdir(args.output_dir)\n\n args.savename = 'distill_' + str(args.cls_par)\n\n train_distill(args)" }, { "alpha_fraction": 0.710106372833252, "alphanum_fraction": 0.7452127933502197, "avg_line_length": 48.47368240356445, "blob_id": "eb3fe2b32d86d37e1a9c67cb4ff8fa7760172b8b", "content_id": "e243db49d581d93793c1f9bff6e1ff3c44057e63", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1880, "license_type": "no_license", "max_line_length": 318, "num_lines": 38, "path": "/README.md", "repo_name": "lllate/DECISION", "src_encoding": "UTF-8", "text": "# DECISION\nUnsupervised Multi-source Domain Adaptation Without Access to Source Data (CVPR '21 Oral)\n\n### Overview\nThis repository is a PyTorch implementation of the paper [Unsupervised Multi-source Domain Adaptation Without Access to Source Data](https://arxiv.org/pdf/2104.01845.pdf) published at [CVPR 2021](http://cvpr2021.thecvf.com/). This code is based on the [SHOT](https://github.com/tim-learn/SHOT) repository.\n\n### Dependencies\nCreate a conda environment with `environment.yml`.\n\n### Dataset\n- Manually download the datasets [Office](https://drive.google.com/file/d/0B4IapRTv9pJ1WGZVd1VDMmhwdlE/view), [Office-Home](https://drive.google.com/file/d/0B81rNlvomiwed0V1YUxQdC1uOTg/view), [Office-Caltech](http://www.vision.caltech.edu/Image_Datasets/Caltech256/256_ObjectCategories.tar) from the official websites.\n- Move `gen_list.py` inside data directory.\n- Generate '.txt' file for each dataset using `gen_list.py` (change dataset argument in the file accordingly). \n\n### Training\n- Train source models (shown here for Office with source A)\n```\npython train_source.py --dset office --s 0 --max_epoch 100 --trte val --gpu_id 0 --output ckps/source/\n```\n- Adapt to target (shown here for Office with target D)\n```\npython adapt_multi.py --dset office --t 1 --max_epoch 15 --gpu_id 0 --output_src ckps/source/ --output ckps/adapt\n```\n- Distill to single target model (shown here for Office with target D)\n```\npython distill.py --dset office --t 1 --max_epoch 15 --gpu_id 0 --output_src ckps/adapt --output ckps/dist\n```\n\n### Citation\nIf you use this code in your research please consider citing\n```\n@article{ahmed2021unsupervised,\n title={Unsupervised Multi-source Domain Adaptation Without Access to Source Data},\n author={Ahmed, Sk Miraj and Raychaudhuri, Dripta S and Paul, Sujoy and Oymak, Samet and Roy-Chowdhury, Amit K},\n journal={arXiv preprint arXiv:2104.01845},\n year={2021}\n}\n```\n" } ]
4
ostueker/workflow
https://github.com/ostueker/workflow
e8dcaa7822fdce83f23182e0601b9218c8d3c104
9be77583e4ce2bf1a345350ca02374ef52ac013a
3c50eda3657c14176a8513884925969c0f544e81
refs/heads/master
2021-01-23T01:17:22.757013
2017-05-30T19:03:05
2017-05-30T19:03:05
92,862,623
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5280715823173523, "alphanum_fraction": 0.5907241702079773, "avg_line_length": 36.24242401123047, "blob_id": "102ace7dd97d00a2fc77d4f59c3eaa4a5151ed15", "content_id": "c343bae0ed4bb3da3e19edd12667cb932a72d15d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1229, "license_type": "no_license", "max_line_length": 79, "num_lines": 33, "path": "/generate_data.py", "repo_name": "ostueker/workflow", "src_encoding": "UTF-8", "text": "#!/bin/env python\nimport numpy as np\nimport pandas as pd\n\ndef func(x, a, b):\n '''function ax+b for generating raw data'''\n return a * x + b\n\nif __name__ == '__main__' :\n data1 = np.random.normal(6.0, 0.5, 1000) # healthy\n data2 = np.random.normal(5.8, 2.0, 1000) # treated\n data3 = np.random.normal(4.0, 0.5, 1000) # untreated\n\n np.savetxt('raw_data/data_1a.txt', data1, header=\"Example Data: healthy\")\n np.savetxt('raw_data/data_1b.txt', data3, header=\"Example Data: treated\")\n np.savetxt('raw_data/data_1c.txt', data2, header=\"Example Data: untreated\")\n\n x = np.linspace(0, 10 , 101)\n y1 = func(x, 1, 2)\n yn1 = y1 + 0.9 * np.random.normal(size=(len(x)))\n\n y2 = func(x, -1, 12)\n yn2 = y2 + 0.9 * np.random.normal(size=(len(x)))\n\n y3 = func(x, 0.2, 5)\n yn3 = y3 + 0.5 * np.random.normal(size=(len(x)))\n\n pd.DataFrame({'x': x, 'y': yn1}).to_csv('raw_data/data_2a.csv', \n float_format=\"%12.8f\", header=False, index=False)\n pd.DataFrame({'x': x, 'y': yn2}).to_csv('raw_data/data_2b.csv', \n float_format=\"%12.8f\", header=False, index=False)\n pd.DataFrame({'x': x, 'y': yn3}).to_csv('raw_data/data_2c.csv', \n float_format=\"%12.8f\", header=False, index=False)\n" }, { "alpha_fraction": 0.75, "alphanum_fraction": 0.7563775777816772, "avg_line_length": 42.55555725097656, "blob_id": "862fa9e2ab93b15604430f8ed4c6b4716baf95bb", "content_id": "a0d6e0fce0007631a12b84702663babf7ecb9bd4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 784, "license_type": "no_license", "max_line_length": 74, "num_lines": 18, "path": "/README.md", "repo_name": "ostueker/workflow", "src_encoding": "UTF-8", "text": "# Automated pipelines and Workflows\n\nThis contains an example for using `make` to generate \na workflow for processing data, generating figures\nand assembling a report.\n\n* The initial state contains the following files:\n\t- `Workflow - initial.ipynp` - IPython Notebook with the initial analysis\n\t- `raw_data/data1[a-c].txt` - Data from a Bio-Essay.\n\t- `raw_data/data2[a-c].csv` - Some noisy data from calibration runs.\n\t- `generate_data.py` - Script to create above raw data.\n\n* Second stage has analysis split into separate pyhon files:\n\t- `process_data1.py` - Calculates means and standard-deviation\n\t- `make_figure1.py` - Creates barplot and histogram.\n\t- `make_figure2.py` - Performs linear regression and creates scatterplot.\n\n* Third stage has a Makefile for smarter processing.\n" }, { "alpha_fraction": 0.7264492511749268, "alphanum_fraction": 0.7590579986572266, "avg_line_length": 41.46154022216797, "blob_id": "8a32c0e16ba6524a79cfc0af0761401ee6516788", "content_id": "39b699b36a8dcc0f622f3ff11301d22b7a362449", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 552, "license_type": "no_license", "max_line_length": 114, "num_lines": 13, "path": "/make_report.sh", "repo_name": "ostueker/workflow", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\npython 
process_data1.py raw_data/data_1a.txt raw_data/data_1b.txt raw_data/data_1c.txt\nsleep 5 # let's pretend this step is slooooow\n\npython make_figure1.py raw_data/data_1a.txt raw_data/data_1b.txt raw_data/data_1c.txt processed_data/means.csv\n\npython make_figure2.py raw_data/data_2a.csv plots/figure_2a.svg\npython make_figure2.py raw_data/data_2c.csv plots/figure_2b.svg\npython make_figure2.py raw_data/data_2c.csv plots/figure_2c.svg\n\n# pdflatex -shell-escape report\nlatexmk -pdf -pdflatex=\"pdflatex -shell-escape\" report\n" }, { "alpha_fraction": 0.7132805585861206, "alphanum_fraction": 0.7440633177757263, "avg_line_length": 32.47058868408203, "blob_id": "b34ce6e0d7258903e29d8bcf68b0e0790c88290b", "content_id": "f3548323cb4d849e47acc50ec122ccb74da534bb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 1137, "license_type": "no_license", "max_line_length": 89, "num_lines": 34, "path": "/Makefile", "repo_name": "ostueker/workflow", "src_encoding": "UTF-8", "text": "# Makefile to process datafiles, generate a plot and build LaTeX report.\n#\n# Variable with list of files with raw data:\nDATA1=raw_data/data_1a.txt raw_data/data_1b.txt raw_data/data_1c.txt\nFIGURES=plots/figure_1.svg plots/figure_2a.svg plots/figure_2b.svg plots/figure_2c.svg\n\nreport.pdf: report.tex $(FIGURES)\n\tlatexmk report.tex -pdf -pdflatex='pdflatex -shell-escape'\n\nplots/figure_1.svg: make_figure1.py $(DATA1) processed_data/means.csv\n\tpython make_figure1.py $(DATA1) processed_data/means.csv\n\nplots/figure_2a.svg: make_figure2.py raw_data/data_2a.csv\n\tpython make_figure2.py raw_data/data_2a.csv plots/figure_2a.svg\n\nplots/figure_2b.svg: make_figure2.py raw_data/data_2b.csv\n\tpython make_figure2.py raw_data/data_2b.csv plots/figure_2b.svg\n\nplots/figure_2c.svg: make_figure2.py raw_data/data_2c.csv\n\tpython make_figure2.py raw_data/data_2c.csv plots/figure_2c.svg\n\nprocessed_data/means.csv: process_data1.py $(DATA1)\n\tpython process_data1.py $(DATA1)\n\n.PHONY: clean almost_clean\n\nclean: almost_clean\n\trm report.pdf\n\trm plots/figure*\n\trm processed_data/means.csv\n\nalmost_clean:\n\tlatexmk -c\n\trm *.pyg" }, { "alpha_fraction": 0.5923322439193726, "alphanum_fraction": 0.6325878500938416, "avg_line_length": 30.93877601623535, "blob_id": "2ce4768c9aca964b9b76522218f60f2b90a9e1dc", "content_id": "953b056cbbff3472a0e95ebae5577125d2b435b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1565, "license_type": "no_license", "max_line_length": 93, "num_lines": 49, "path": "/make_figure1.py", "repo_name": "ostueker/workflow", "src_encoding": "UTF-8", "text": "#!/bin/env python\nimport sys\nimport numpy as np\nimport pandas as pd\nimport matplotlib as mpl\nmpl.use('Agg') # Don't use QT backend\nimport matplotlib.pyplot as plt\n\nmpl.rcParams['errorbar.capsize'] = 3\nmpl.rcParams['grid.linestyle'] = ':'\n\nif __name__ == '__main__':\n\n if len(sys.argv[1:]) != 4:\n print('Please supply exactly four data files.\\n e.g.:')\n print('python {:} data_1a.txt data_1b.txt data_1c.txt means.csv'.format(sys.argv[0]))\n sys.exit(1)\n\n # load data\n data_1a = np.loadtxt( sys.argv[1])\n data_1b = np.loadtxt( sys.argv[2])\n data_1c = np.loadtxt( sys.argv[3])\n means = pd.read_csv(sys.argv[4])\n\n # make plots for Bio Essay\n fig1 = plt.figure(1, (10, 4))\n fig1.suptitle(\"Bio Assay\")\n\n # Subplot 1: Barplot with error bars:\n ax1 = plt.subplot(121)\n means['means'].plot(kind='bar', yerr=means['err'], 
title='Averages w/ error bars')\n ax1.yaxis.set_ticks(np.arange(1,10,1), minor=True)\n ax1.yaxis.set_ticks_position(position='both')\n ax1.yaxis.grid(which='both' )\n for label in ax1.get_xmajorticklabels():\n label.set_rotation(30)\n label.set_horizontalalignment(\"right\")\n\n # Subplot 1: three histograms:\n ax2 = plt.subplot(122)\n ax2.set_title('Distribution')\n ax2.hist(data_1a, bins=30, alpha=0.5, label='healthy' )\n ax2.hist(data_1b, bins=30, alpha=0.5, label='treated' )\n ax2.hist(data_1c, bins=30, alpha=0.5, label='untreated' )\n ax2.legend()\n\n figname = \"plots/figure_1.svg\"\n fig1.savefig(figname)\n print('Saved: \"{:}\"'.format(figname))\n" }, { "alpha_fraction": 0.5555555820465088, "alphanum_fraction": 0.5803418755531311, "avg_line_length": 25, "blob_id": "c0d0ebe3ffc6a44356ad16d11b75743c9c2ad677", "content_id": "9413f0a0c26cd759c07164d11275bf4debc9e14b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1170, "license_type": "no_license", "max_line_length": 74, "num_lines": 45, "path": "/make_figure2.py", "repo_name": "ostueker/workflow", "src_encoding": "UTF-8", "text": "#!/bin/env python\nimport re\nimport sys\nimport os.path\nimport numpy as np\nimport matplotlib as mpl\nmpl.use('Agg') # Don't use QT backend\nimport matplotlib.pyplot as plt\nfrom scipy.optimize import curve_fit\n\ndef func(x, a, b):\n '''function ax+b for fitting'''\n return a * x + b\n\nif __name__ == '__main__':\n if len(sys.argv[1:]) != 2:\n print('Please supply one data file and one image name. e.g.:')\n print('python {:} data_2a.csv figure_2a.png'.format(sys.argv[0]))\n sys.exit(1)\n\n file_name = sys.argv[1]\n figname = sys.argv[2]\n\n data_2 = np.loadtxt(file_name, delimiter=',')\n data_2 = data_2.transpose()\n basename = os.path.basename(file_name)\n\n x = data_2[0]\n y_raw = data_2[1]\n \n # fitting raw data to function\n popt, pconv = curve_fit(func, x, y_raw)\n a, b = popt[0], popt[1]\n print(\"{:}\\ta: {:12.8f}, b: {:12.8f}\".format(basename, a, b))\n y_fit = func(x, a, b )\n\n plt.figure(figsize=(8, 6))\n plt.title(file_name)\n plt.scatter(x, y_raw)\n plt.plot(x, y_fit, 'k--')\n plt.ylim(ymin=0 ) \n # plt.ylim(0, 14)\n\n plt.savefig(figname)\n print('Saved: \"{:}\"'.format(figname))\n" }, { "alpha_fraction": 0.5465768575668335, "alphanum_fraction": 0.5701459050178528, "avg_line_length": 26.84375, "blob_id": "ef9b550f6a3540e25c19032d4f9267818bd5017e", "content_id": "8d4a1855c26f3472e922d073b19d519e1f808712", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 891, "license_type": "no_license", "max_line_length": 84, "num_lines": 32, "path": "/process_data1.py", "repo_name": "ostueker/workflow", "src_encoding": "UTF-8", "text": "#!/bin/env python\nimport numpy as np\nimport pandas as pd\nimport sys\n\nif __name__ == '__main__':\n\n if len(sys.argv[1:]) != 3:\n print('Please supply three data files. 
e.g.:')\n print('python {:} data_1a.txt data_1b.txt data_1c.txt '.format(sys.argv[0]))\n sys.exit(1)\n\n # load bio essay data into np arrays\n data_1a = np.loadtxt(sys.argv[1])\n data_1b = np.loadtxt(sys.argv[2])\n data_1c = np.loadtxt(sys.argv[3])\n print('Loaded datafiles: {:}'.format(', '.join(sys.argv[1:4])))\n\n # calculate averages and standard deviation\n df1 = pd.DataFrame({\n 'healthy': data_1a,\n 'treated': data_1b,\n 'untreated': data_1c,\n })\n\n means = pd.DataFrame({ \n 'means': df1.mean(),\n 'err': df1.std(),\n })[['means', 'err']]\n\n means.to_csv('processed_data/means.csv')\n print('Generated processed_data/means.csv')\n" } ]
7
renukakasbe/MEDIHISTORY
https://github.com/renukakasbe/MEDIHISTORY
3c03bbe56bfc677e134510f12eba1511b2f821ad
a09a48e3944c87db13a9a8cecd29eef2cd8af400
88bdcd99b4504572e8e9819c69d08e826642a138
refs/heads/master
2023-05-04T05:26:18.082811
2021-05-30T17:19:15
2021-05-30T17:19:15
372,014,197
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5528255701065063, "alphanum_fraction": 0.6068795919418335, "avg_line_length": 21.61111068725586, "blob_id": "d1511beddb69db25aaa1eda3ffa5b4da8b68d263", "content_id": "faa5cffc0278a0e991b7825a1177af8f4cf2ca0f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 407, "license_type": "no_license", "max_line_length": 57, "num_lines": 18, "path": "/medical/record/migrations/0009_alter_medicalrecord_medicines.py", "repo_name": "renukakasbe/MEDIHISTORY", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-05-25 19:22\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('record', '0008_alter_medicalrecord_medicines'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='medicalrecord',\n name='medicines',\n field=models.CharField(max_length=300),\n ),\n ]\n" }, { "alpha_fraction": 0.5331325531005859, "alphanum_fraction": 0.5903614163398743, "avg_line_length": 18.52941131591797, "blob_id": "fce601ab96d31b736a2f7006d9e7bcbeb6901a80", "content_id": "80d5a5c48e52c18a7253296b6f7381038479cc31", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 332, "license_type": "no_license", "max_line_length": 47, "num_lines": 17, "path": "/medical/record/migrations/0007_remove_medicalrecord_givendate.py", "repo_name": "renukakasbe/MEDIHISTORY", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-05-25 19:18\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('record', '0006_medicalrecord'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='medicalrecord',\n name='givenDate',\n ),\n ]\n" }, { "alpha_fraction": 0.5029516220092773, "alphanum_fraction": 0.5371900796890259, "avg_line_length": 24.66666603088379, "blob_id": "7517441f281263fcf26a260dbea19395ca74381f", "content_id": "25c0402a9ff2fccdf6735e28018ba3c4ee48431c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 847, "license_type": "no_license", "max_line_length": 51, "num_lines": 33, "path": "/medical/record/migrations/0002_auto_20210522_2144.py", "repo_name": "renukakasbe/MEDIHISTORY", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-05-22 16:14\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('record', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='person',\n name='blood_group',\n field=models.CharField(max_length=20),\n ),\n migrations.AlterField(\n model_name='person',\n name='first_name',\n field=models.CharField(max_length=300),\n ),\n migrations.AlterField(\n model_name='person',\n name='gender',\n field=models.CharField(max_length=30),\n ),\n migrations.AlterField(\n model_name='person',\n name='last_name',\n field=models.CharField(max_length=200),\n ),\n ]\n" }, { "alpha_fraction": 0.5571776032447815, "alphanum_fraction": 0.6107056140899658, "avg_line_length": 21.83333396911621, "blob_id": "f33bb5a8d85204d3a806ee877e21e783726e9ff9", "content_id": "65bee7bc61075bd1a940e57259d403e00e0836b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 411, "license_type": "no_license", "max_line_length": 58, "num_lines": 18, "path": "/medical/record/migrations/0008_alter_medicalrecord_medicines.py", "repo_name": "renukakasbe/MEDIHISTORY", 
"src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-05-25 19:20\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('record', '0007_remove_medicalrecord_givendate'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='medicalrecord',\n name='medicines',\n field=models.IntegerField(max_length=300),\n ),\n ]\n" }, { "alpha_fraction": 0.6909722089767456, "alphanum_fraction": 0.71875, "avg_line_length": 24.954545974731445, "blob_id": "59e95b6c263e952b9f09589d13d0708282b1fd64", "content_id": "041d4237100aff29198c3c6f55c2b66dba52fcd9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 576, "license_type": "no_license", "max_line_length": 46, "num_lines": 22, "path": "/medical/record/models.py", "repo_name": "renukakasbe/MEDIHISTORY", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom datetime import datetime\n\n#create your model here.\nclass Person(models.Model):\n name=models.CharField(max_length=300)\n age=models.IntegerField(default=0)\n bloodGroup=models.CharField(max_length=20)\n gender=models.CharField(max_length=30)\n\n def __str__(self):\n return self.Person\nclass MedicalRecord(models.Model):\n symptoms=models.CharField(max_length=300)\n medicines=models.CharField(max_length=300)\n membername=models.CharField(max_length=20)\n\n\n\n\n def __str__(self):\n return self.MedicalRecord\n\n\n\n\n\n" }, { "alpha_fraction": 0.6813725233078003, "alphanum_fraction": 0.6813725233078003, "avg_line_length": 27.52941131591797, "blob_id": "7b1d67da36402586b22dd0948b53df49aedc4d82", "content_id": "5a1cf2da103bfb98ab5b62e09588409795bc9d4a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2040, "license_type": "no_license", "max_line_length": 78, "num_lines": 68, "path": "/medical/record/views.py", "repo_name": "renukakasbe/MEDIHISTORY", "src_encoding": "UTF-8", "text": "from django.shortcuts import render,redirect\nfrom django.http import HttpResponse\n\n\n\nfrom.models import Person\nfrom.models import MedicalRecord\nfrom.forms import PersonForm\nfrom.forms import MedicalRecordForm\n\nfrom django import views\n\n# Create your views here.\ndef welcome(request):\n return render(request, \"welcome.html\")\n\ndef load_form(request):\n form = PersonForm\n return render(request, \"index.html\", {'form': form})\n\ndef add(request):\n form = PersonForm(request.POST)\n print(\"in add method \");\n print(form.data.values())\n form.save()\n person = Person.objects.all\n return render(request, \"show.html\", {'person':person})\n\ndef show(request):\n # we create a instance for model form we create photo copy of model form\n # to catch data objects.all()\n person =Person.objects.all\n print(\"in show method \")\n print(person)\n # we to show the details on show.html for that purpuse we write below code\n return render(request, 'show.html', {'person':person})\n\n\ndef addMedicalRecord(request):\n person = Person.objects.all\n return render(request, 'addMedicalRecord.html', {'person': person})\n\ndef saveMedicalRecord(request):\n form = MedicalRecordForm(request.POST)\n print(\"in add method \");\n print(form.data.values())\n form.save()\n medical = MedicalRecord.objects.all\n return render(request, \"showMedicalRecord.html\", {'medical':medical})\n\ndef viewMedicalRecords(request):\n # we create a instance for model form we create photo copy of model form\n # to catch data objects.all()\n medical = 
MedicalRecord.objects.all\n return render(request, \"showMedicalRecord.html\", {'medical': medical})\n\n\n\ndef delete_data(request,id):\n person=Person.objects.get(id=id)\n person.delete()\n return redirect(\"/show\")\n\n\ndef delete_medicalRecord(request,id):\n medicalRecord=MedicalRecord.objects.get(id=id)\n medicalRecord.delete()\n return redirect(\"/viewMedicalRecords\")\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.6776859760284424, "alphanum_fraction": 0.6776859760284424, "avg_line_length": 27, "blob_id": "39d8249db8a5789a21ad4153b1a09e0247e77e33", "content_id": "aab7ad347320825dbc358b66d4a84aec231379a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 363, "license_type": "no_license", "max_line_length": 56, "num_lines": 13, "path": "/medical/record/forms.py", "repo_name": "renukakasbe/MEDIHISTORY", "src_encoding": "UTF-8", "text": "from django import forms\nfrom .models import Person\nfrom .models import MedicalRecord\n\nclass PersonForm(forms.ModelForm):\n class Meta:\n model = Person\n fields = (\"name\", \"age\", \"bloodGroup\", \"gender\")\n\nclass MedicalRecordForm(forms.ModelForm):\n class Meta:\n model = MedicalRecord\n fields = (\"symptoms\", \"medicines\", \"membername\")" }, { "alpha_fraction": 0.7549857497215271, "alphanum_fraction": 0.7549857497215271, "avg_line_length": 20.875, "blob_id": "f24f6b2d2bc1a13f1916b890b11f9d64b66ee8d9", "content_id": "d1df40958398a93ad682beab3ef2f9c50292f394", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 351, "license_type": "no_license", "max_line_length": 55, "num_lines": 16, "path": "/medical/record/admin.py", "repo_name": "renukakasbe/MEDIHISTORY", "src_encoding": "UTF-8", "text": "from django.contrib import admin\nfrom .models import Person\nfrom .models import MedicalRecord\n\n\nclass PersonAdmin(admin.ModelAdmin):\n display=['name','age','bloodGroup','gender']\n\nclass MedicalRecordAdmin(admin.ModelAdmin):\n display = ['symptoms','medicines','membername']\n\n\n\nadmin.site.register(Person)\n\nadmin.site.register(MedicalRecord)\n\n" }, { "alpha_fraction": 0.49352332949638367, "alphanum_fraction": 0.5194300413131714, "avg_line_length": 23.125, "blob_id": "aab61c0381478c1a38e013e1bd505de267ecb7f0", "content_id": "10648c2be332e44271872122396d41c9a2edf269", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 772, "license_type": "no_license", "max_line_length": 49, "num_lines": 32, "path": "/medical/record/migrations/0005_auto_20210525_1755.py", "repo_name": "renukakasbe/MEDIHISTORY", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-05-25 12:25\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('record', '0004_rename_user_person'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='person',\n old_name='blood_group',\n new_name='bloodGroup',\n ),\n migrations.RenameField(\n model_name='person',\n old_name='first_name',\n new_name='name',\n ),\n migrations.RemoveField(\n model_name='person',\n name='last_name',\n ),\n migrations.AddField(\n model_name='person',\n name='age',\n field=models.IntegerField(default=0),\n ),\n ]\n" } ]
9
NikolaiNielsen/VildlederEco
https://github.com/NikolaiNielsen/VildlederEco
da39e253cd2324c32f289a132b09dcd30a4357e7
dfa6727a9185831f1f9f70bba953d3a3ce527b0c
1dbd2a8557b77260f535a29790d203483213413f
refs/heads/master
2022-08-11T02:11:46.381732
2020-05-18T17:26:29
2020-05-18T17:26:29
265,009,701
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6447223424911499, "alphanum_fraction": 0.6539168953895569, "avg_line_length": 32.98749923706055, "blob_id": "5027fcc69389d3ec6af14da6165b3a56919e0461", "content_id": "b870204ad0c4a6a7ae7540461c7eb45ce7147856", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2719, "license_type": "no_license", "max_line_length": 86, "num_lines": 80, "path": "/vildlederCheck.py", "repo_name": "NikolaiNielsen/VildlederEco", "src_encoding": "UTF-8", "text": "import pickle\nimport os.path\nimport sys\nfrom operator import itemgetter\nfrom googleapiclient.discovery import build\nfrom google_auth_oauthlib.flow import InstalledAppFlow\nfrom google.auth.transport.requests import Request\n\n# We both want to read and write to the spreadsheet\nSCOPES = ['https://www.googleapis.com/auth/spreadsheets']\nSHEET_ID = '1XgJCDX78jg_ib_wBJUsKxRkUxmgbQ6_33l44BfRHJo0'\nINPUTOPTION = 'USER_ENTERED'\nRANGES = 'A4:H'\nNUM_COLS = 8\nSHEETS = ['Tema', 'Mad']\nRANGE = [\"!\".join((x, RANGES)) for x in SHEETS]\n\n\ndef get_unique_el(elements, sort_by=(0, 1)):\n \"\"\"Returns a list of unique list, sorted by the\n n'th element of the lists. sort_by supports tuples and ints.\n \"\"\"\n uniques = [list(x) for x in set(tuple(x) for x in elements)]\n sort = sorted(uniques, key=itemgetter(*sort_by))\n return sort\n\n\ndef auth():\n \"\"\"Perform authentication if needed.\n Ripped straight from quickstart.py from Google.\n \"\"\"\n creds = None\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n service = build('sheets', 'v4', credentials=creds)\n sheet = service.spreadsheets()\n return sheet\n\n\ndef propagate_down(values, columnID):\n # Propagates the value of a given cell down through empty cells, in a given column\n for i in range(len(values)-1):\n if len(values[i+1][columnID]) == 0:\n values[i+1][columnID] = values[i][columnID]\n return values\n\n\ndef prepare_sheet_results(values, cols=[0, 1, 5, 6, 7]):\n # Values are returned as a list of lists, each of which contain the cell contents.\n # Exclude empty rows - they correspond to empty lists.\n values = [x for x in values if len(x) == NUM_COLS]\n # propagate down the category value\n values = propagate_down(values, 0)\n # Keep only certain columns.\n usable_values = [[row[i] for i in cols] for row in values]\n return usable_values\n\n\ndef main():\n sheet = auth()\n result = sheet.values().batchGet(spreadsheetId=SHEET_ID,\n ranges=RANGE).execute()\n valueRange = result.get('valueRanges', [])\n values = [Range.get('values', []) for Range in valueRange]\n prepared_values = [prepare_sheet_results(x) for x in values]\n print(prepared_values)\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.6557700634002686, "alphanum_fraction": 0.6618034243583679, "avg_line_length": 35.031620025634766, "blob_id": "ceb8c6a8cf0cbeee5359c6e699dcdc79e692d4e0", "content_id": "bf4ca9d45bdb5d59f531cd5fb7fedad9fec75796", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9167, "license_type": "no_license", "max_line_length": 79, "num_lines": 253, "path": "/vildlederCheckLokal.py", 
"repo_name": "NikolaiNielsen/VildlederEco", "src_encoding": "UTF-8", "text": "\"\"\"\nForfatter: Nikolai Nielsen ([email protected])\nEt script til at hjælpe med at gøre Vildlederøkonomien lidt nemmere:\n- Gennemgår alle arkene set i variablen \"SHEETS\", finder alle indtastninger, og\n laver en oversigt over, hvad hver person har udlagt, under hvilken kategori,\n og med hvilken betalingsmetode.\n\nFremtidige forhåbninger:\n- Automatisk genkendelse af, hvor de relevante data er, i hvert ark, så det er\n mere robust\n- Grafisk brugerflade for dataindtastning\n- Grafisk brugerflade til valg af fil\n\nBRUG AF DETTE PROGRAM\n- Programmet bruger pakker fra standardbiblioteket, samt \"openpyxl\" (version\n 3.0.0), der skal installeres separat, for eksempel gennem pip. Den er også\n inkluderet med Anaconda distributionen.\n- For at benytte dette program skal du have en lokal kopi af regnskabet i\n xlsx-format. Du skal ændre \"WORKBOOK_NAME\" til den relative sti til\n regnearket. Hvis regnearket er i samme mappe som dette program, kan du bare\n sætte WORKBOOK_NAME til at være filens navn (husk fil-typen!).\n- Du skal ændre \"SHEETS\" til en liste af strings over hvilke ark, der skal\n tjekkes igennem. Som regel behøver denne ikke at blive ændret fra de normale,\n med mindre, der bliver ændret kategorier af øko-gruppen efter dette program\n er oprettet.\n- Programmet forventer at data er indtastet fra kolonne A til H, og fra række 4\n og nedad. Det forventes af kolonnerne er som følger:\n - A: Bilagskode - kategorien af indkøbet. eksempelvis \"Tema\"\n - B: Bilagsnummer - Indkøbets nummer. Starter med 01, så 02 og så videre.\n Sammen med bilagskoden udgør dette en samlet \"ID\" for\n købet, eksempelvis Tema01.\n - C: NBI Rekvireringsnummer - som regel ikke vigtig. Bruges ikke her\n - D: Tekst - beskrivelse af købet\n - E: Status på bilag - Hvor ligger kvitteringen henne? (den skal helst i\n mappen eller digitalt på dropbox eller lign.)\n - F: Beløb - hvor meget har dette indkøb kostet?\n - G: metode - hvordan indkøbet er foretaget. Der er 3 måder pt, set under\n \"PAYMENT_METHODS\". I arket forventes det at dette er et heltal,\n svarende til positionen i PAYMENT_METHODS (så 0=kontokort, etc)\n - H: Navn - hvem har foretaget købet? Der skelnes mellem store og små\n bogstaver!\n- Det er egentlig kun kolonnerne A, B, F, G og H, der bliver brugt.\n- \"RANGES\" bør kun ændres, hvis layouttet af regnearket ændres. Ligeledes skal\n \"NUM_COLS\" svare til hvor mange kolonner der skal læses fra (8, i dette\n tilfælde, da der skal læses fra A til H).\n- Når disse ting er sørget for, skal programmet bare køres. Så bliver der\n automatisk indlæst og oprettet et nyt ark, \"Opsummering\", hvor der står en\n opsummering over, hvad hvert \"navn\" har købt, hvilke kvitteringer samt samlet\n beløb for hver betallingsmulighed. Så kan man nemt se, hvis man har skrevet\n et navn forkert, og hvor mange penge, hver person skal have tilbage gennem\n REJS-ud, samt hvor mange penge, der skal tilbagebetales til vejlederkontoen\n (i form af tilskud)\n\"\"\"\n\nimport openpyxl\nimport pprint\nfrom decimal import Decimal\nfrom operator import itemgetter\n\nWORKBOOK_NAME = \"ProperVildleder2019.xlsx\"\nSHEETS = ['Tema', 'Mad']\nPAYMENT_METHODS = [\"kontokort\", \"vejlederkort\", \"personlig\"]\n\nRANGES = 'A4:H'\nNUM_COLS = 8\n\n\ndef propagate_down(values, columnID=0):\n \"\"\"\n Propagates the value of a given sublist down through empty sublists, in a\n given column. 
Ie, if the first entry of sublist 1 is \"Mad\", and the first\n entry of sublist 2 is None, then this code propagates the \"Mad\" down\n through the sublists, until it hits a non-None. Thus making sure that the\n column is fully populated.\n\n Assumes values is a list of lists.\n \"\"\"\n for i in range(len(values)-1):\n if values[i+1][columnID] is None:\n values[i+1][columnID] = values[i][columnID]\n return values\n\n\ndef prepare_sheet_results(values, cols=[0, 1, 5, 6, 7]):\n \"\"\"\n Takes the values of the sheet and prepares them for the summary.\n Assumes values is a list of lists.\n \"\"\"\n\n # Values are returned as a list of lists, each of which contain the\n # cell contents. Default columns are\n # - Category\n # - receipt number\n # - price\n # - payment method\n # - name\n\n # Exclude empty rows - they have all None values.\n values = [x for x in values if x != [None]*NUM_COLS]\n\n # propagate down the category value\n values = propagate_down(values, 0)\n\n # Keep only certain columns.\n usable_values = [[row[i] for i in cols] for row in values]\n\n # Combine category and receipt number - pad with a single 0\n new_cat = [f'{row[0]}{int(row[1]):02d}' for row in usable_values]\n\n # include the new category in the listings (category, price, payment, name)\n combined = [[z[0], *z[1][2:]] for z in zip(new_cat, usable_values)]\n\n return combined\n\n\ndef get_values_from_sheet(sheet):\n # Find the max row count of the sheet and get the cells\n maxrow = sheet.max_row\n cells = sheet[f\"{RANGES}{sheet.max_row}\"]\n\n # Extract the raw values of each cell and store it in a list of lists\n values = [[i.internal_value for i in rows] for rows in cells]\n\n # prep the values and return the sheet\n prepped_values = prepare_sheet_results(values)\n return prepped_values\n\n\ndef combine_sheets(sheet_lists):\n \"\"\"\n Combines the sheets. Assumes sheet_lists is a list of sheets (which\n themselves are a list of list)\n \"\"\"\n combined = []\n for sheet in sheet_lists:\n combined = combined + sheet\n return combined\n\n\ndef process_data(sheet):\n \"\"\"\n Takes the prepared data and processes it into the format used in the\n summary sheet.\n \"\"\"\n # The vildleder has the structure of vildleder -> name -> payment method ->\n # receipt numbers and price\n vildleder = {}\n names = []\n # Get the names\n names = [row[-1] for row in sheet]\n unique_names = list(set(names))\n\n # Create the dictionary template\n for name in unique_names:\n vildleder[name] = {method: {\"kvitteringer\": [], \"beløb\": Decimal(0)}\n for method in PAYMENT_METHODS}\n\n # populate the dictionary\n for row in sheet:\n receipt, price, method, name = row\n method = PAYMENT_METHODS[int(method)]\n # Round the amount to two decimals, and use Decimal. 
Probably an over\n # cautious approac.\n vildleder[name][method]['beløb'] += round(Decimal(price), 2)\n vildleder[name][method]['kvitteringer'].append(receipt)\n\n return vildleder\n\n\ndef populate_sheet(vildleder, workbook):\n \"\"\"\n Takes the processed data from dictionary and populates the sheet.\n \"\"\"\n sheet_name = \"Opsummering\"\n\n # Make sure the sheet is cleared\n if sheet_name in workbook:\n workbook.remove(workbook[sheet_name])\n\n workbook.create_sheet(sheet_name, 2)\n sheet = workbook[sheet_name]\n\n name_row = 4\n name_col = 2\n method_offset = 1\n price_offset = 2\n receipt_offset = 4\n names = sorted(list(vildleder.keys()))\n for n, name in enumerate(names):\n # Set names\n c = sheet.cell(row=name_row, column=name_col+n*len(PAYMENT_METHODS))\n c.value = name\n for m, method in enumerate(PAYMENT_METHODS):\n # Set payment methods\n c = sheet.cell(row=name_row+method_offset,\n column=name_col+n*len(PAYMENT_METHODS)+m)\n c.value = method\n\n # Set total amount paid\n c = sheet.cell(row=name_row+price_offset,\n column=name_col+n*len(PAYMENT_METHODS)+m)\n c.value = vildleder[name][method][\"beløb\"]\n\n # List receipts\n receipts = vildleder[name][method][\"kvitteringer\"]\n for i, receipt in enumerate(receipts):\n c = sheet.cell(row=name_row+receipt_offset+i,\n column=name_col+n*len(PAYMENT_METHODS)+m)\n c.value = receipt\n\n # Set the \"fucked receipts\" title\n sheet[f'A{name_row}'] = \"Kvitteringer Med Fejl\"\n\n # Set column widths\n def as_text(val): return str(val) if val is not None else \"\"\n for column_cells in sheet.columns:\n length = max(len(as_text(cell.value)) for cell in column_cells)\n sheet.column_dimensions[openpyxl.utils.get_column_letter(\n column_cells[0].column)].width = length\n\n # Set title\n sheet['A1'] = sheet_name\n f = openpyxl.styles.Font(size=18)\n sheet['A1'].font = f\n sheet.merge_cells(\"A1:D1\")\n\n\ndef main():\n \"\"\"\n Open workbook. Get values from each worksheet in SHEETS, and combine them\n into one list. 
Process the data and create the summary sheet in the\n workbook.\n \"\"\"\n wb = openpyxl.load_workbook(WORKBOOK_NAME)\n\n sheets = []\n for name in SHEETS:\n sheet = wb[name]\n values = get_values_from_sheet(sheet)\n sheets.append(values)\n\n sheet = combine_sheets(sheets)\n\n vildleder = process_data(sheet)\n\n populate_sheet(vildleder, wb)\n\n # Finally, save the workbook\n wb.save(WORKBOOK_NAME)\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.7299173474311829, "alphanum_fraction": 0.7342149019241333, "avg_line_length": 57.19230651855469, "blob_id": "fa706b9666ec8a9cbccc6e3248a241c5215f1556", "content_id": "1f661130f6017edc03697244f9ec29ef93acaf26", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3073, "license_type": "no_license", "max_line_length": 79, "num_lines": 52, "path": "/README.md", "repo_name": "NikolaiNielsen/VildlederEco", "src_encoding": "UTF-8", "text": "# VildlederEco\n\nEt script til at hjælpe med at gøre Vildlederøkonomien lidt nemmere:\n- Gennemgår alle arkene set i variablen \"SHEETS\", finder alle indtastninger, og\n laver en oversigt over, hvad hver person har udlagt, under hvilken kategori,\n og med hvilken betalingsmetode.\n\n### Fremtidige forhåbninger:\n- Automatisk genkendelse af, hvor de relevante data er, i hvert ark, så det er\n mere robust\n- Grafisk brugerflade for dataindtastning\n- Grafisk brugerflade til valg af fil\n\n### BRUG AF DETTE PROGRAM\n- Programmet bruger pakker fra standardbiblioteket, samt \"openpyxl\" (version\n 3.0.0), der skal installeres separat, for eksempel gennem pip. Den er også\n inkluderet med Anaconda distributionen.\n- For at benytte dette program skal du have en lokal kopi af regnskabet i\n xlsx-format. Du skal ændre \"WORKBOOK_NAME\" til den relative sti til\n regnearket. Hvis regnearket er i samme mappe som dette program, kan du bare\n sætte WORKBOOK_NAME til at være filens navn (husk fil-typen!).\n- Du skal ændre \"SHEETS\" til en liste af strings over hvilke ark, der skal\n tjekkes igennem. Som regel behøver denne ikke at blive ændret fra de normale,\n med mindre, der bliver ændret kategorier af øko-gruppen efter dette program\n er oprettet.\n- Programmet forventer at data er indtastet fra kolonne A til H, og fra række 4\n og nedad. Det forventes af kolonnerne er som følger:\n - A: Bilagskode - kategorien af indkøbet. eksempelvis \"Tema\"\n - B: Bilagsnummer - Indkøbets nummer. Starter med 01, så 02 og så videre.\n Sammen med bilagskoden udgør dette en samlet \"ID\" for\n købet, eksempelvis Tema01.\n - C: NBI Rekvireringsnummer - som regel ikke vigtig. Bruges ikke her\n - D: Tekst - beskrivelse af købet\n - E: Status på bilag - Hvor ligger kvitteringen henne? (den skal helst i\n mappen eller digitalt på dropbox eller lign.)\n - F: Beløb - hvor meget har dette indkøb kostet?\n - G: metode - hvordan indkøbet er foretaget. Der er 3 måder pt, set under\n \"PAYMENT_METHODS\". I arket forventes det at dette er et heltal,\n svarende til positionen i PAYMENT_METHODS (så 0=kontokort, etc)\n - H: Navn - hvem har foretaget købet? Der skelnes mellem store og små\n bogstaver!\n- Det er egentlig kun kolonnerne A, B, F, G og H, der bliver brugt.\n- \"RANGES\" bør kun ændres, hvis layouttet af regnearket ændres. Ligeledes skal\n \"NUM_COLS\" svare til hvor mange kolonner der skal læses fra (8, i dette\n tilfælde, da der skal læses fra A til H).\n- Når disse ting er sørget for, skal programmet bare køres. 
Så bliver der\n automatisk indlæst og oprettet et nyt ark, \"Opsummering\", hvor der står en\n opsummering over, hvad hvert \"navn\" har købt, hvilke kvitteringer samt samlet\n beløb for hver betallingsmulighed. Så kan man nemt se, hvis man har skrevet\n et navn forkert, og hvor mange penge, hver person skal have tilbage gennem\n REJS-ud, samt hvor mange penge, der skal tilbagebetales til vejlederkontoen\n (i form af tilskud)" } ]
3
MasterChenb0x/stalkerbot
https://github.com/MasterChenb0x/stalkerbot
0363a5a4936e3412f4b630a6124207057734fd3a
f75a0cc5907927ccb708109f54d9b2faa002b9d0
b20502b1fb829f25331dda6eb8f65da1747b453b
refs/heads/master
2021-12-22T17:22:46.099712
2021-07-11T19:53:25
2021-07-11T19:53:25
38,189,217
5
2
null
2015-06-28T06:34:21
2021-07-11T19:53:27
2021-12-15T00:15:30
Python
[ { "alpha_fraction": 0.7427350282669067, "alphanum_fraction": 0.7452991604804993, "avg_line_length": 29, "blob_id": "4d8f83ccfba60cd95e1a5434c49c657de82faa1a", "content_id": "62ee7bf277af2c3a09c40a7651028289227fd491", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 1170, "license_type": "no_license", "max_line_length": 223, "num_lines": 39, "path": "/grabber.rb", "repo_name": "MasterChenb0x/stalkerbot", "src_encoding": "UTF-8", "text": "#!/usr/local/rvm/rubies/ruby-2.1.2/bin/ruby\nrequire 'twitter'\n\n rClient = Twitter::REST::Client.new do |config|\n config.consumer_key = \"CONSUMER_KEY\"\n config.consumer_secret = \"CONSUMER_SECRET\"\n config.access_token = \"ACCESS_TOKEN\"\n config.access_token_secret = \"ACCESS_TOKEN_SECRET\"\n end\n\n#Get a list of your TARGET's followers\nfollower_ids = []\nrClient.follower_ids(\"TARGET\").each do |id|\n\tfolllower_ids.push(id)\nend\n\n#Get a list of YOUR followers\nfriend_ids = []\nrClient.friend_ids(\"YOUR_HANDLE\").each do |id|\n\tfriend_ids.push(id)\nend\n\n#Follow the followers of your TARGET who are not yet being followed\nrClient.follow(follower_ids - friend_ids)\n\n#Get a list of the Tweeters your TARGET is FOLLOWING\nfriend_idz = []\nrClient.friend_ids(\"TARGET\").each do |id|\n\tfriend_idz.push(id)\nend\n\nputs friend_idz\n\n#Follow the Tweeters your TARGET is following who you have not yeet followed\nrClient.follow(friend_idz - friend_ids)\n\n=begin\nAt this point in the code, there are two things that have happened. You are following all of the people who follow your TARGET, AND you should now be following everyone who your TARGET follows; both sides of the follow coin\n=end\n" }, { "alpha_fraction": 0.6962617039680481, "alphanum_fraction": 0.7149532437324524, "avg_line_length": 25.75, "blob_id": "8e1944e18f60ceeee0207fceb301a8f7ce3f6493", "content_id": "9729c79bbaf52a0dbd74735caf9b50037f4cabeb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 214, "license_type": "no_license", "max_line_length": 103, "num_lines": 8, "path": "/tweet_scrape_target.sh", "repo_name": "MasterChenb0x/stalkerbot", "src_encoding": "UTF-8", "text": "#!/bin/bash\nsearch=$1\nfilename='TARGETTWEETS.txt'\nfilelines=`cat $filename`\nfor line in $filelines ; \ndo\n\tlinks -dump \"https://twitter.com/TARGET/status/$line\" | grep --after-context=1 --before-context=3 \"$1\"\ndone\n" }, { "alpha_fraction": 0.7137907147407532, "alphanum_fraction": 0.7186416983604431, "avg_line_length": 24.298246383666992, "blob_id": "3f531214d0bfef4825ede278f5b4fb7379001073", "content_id": "d54007b2bd2d0e0f6126e45748a50f1c65ebd6df", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1443, "license_type": "no_license", "max_line_length": 135, "num_lines": 57, "path": "/stalkerbot.py", "repo_name": "MasterChenb0x/stalkerbot", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\nimport sys\nimport os\nimport re\nimport random\nimport time\nimport getopt\nimport tweepy\n\n#-- Twitter Instance setup\nTWITTER = open(\"/home/chen/Documents/TwiTokens.txt\", \"r\").read().splitlines()\nauth = tweepy.OAuthHandler(TWITTER[0], TWITTER[1])\nauth.set_access_token(TWITTER[2], TWITTER[3])\n\napi = tweepy.API(auth, wait_on_rate_limit=True)\n#--\n\n'''\nThis small script just does what the \"grabber.rb\" script did before. Slightly cleaner? 
My code I mean; not necessarily the language :-P\n'''\n\nmyFriends = []\ntargetFollowers = []\ntargetFriends = []\n\n# Get baseline friendlist\nmyFriends = api.friends_ids(screen_name='<YOUR HANDLE')\nprint(\"---My Friends---\")\nprint(myFriends)\n\n# Get list of followers of target\ntargetFollowers = api.followers_ids(screen_name='<YOUR TARGET')\nprint(\"---Target Followers---\")\nprint(targetFollowers)\n\n# Get list of friends of target\ntargetFriends = api.friends_ids(screen_name='<YOUR HANDLE>')\nprint(\"---Target Friends---\")\nprint(targetFriends)\n\nprint(len(myFriends))\nprint(len(targetFollowers))\nprint(len(targetFriends))\ntarget_scope = targetFollowers + targetFriends\nprint(len(target_scope))\n\n\nto_follow = list(set(targetFollowers) - set(myFriends)) + list(set(myFriends) - set(targetFollowers))\nprint(len(to_follow))\n\nfor i in targetFollowers:\n try:\n api.create_friendship(user_id=i)\n time.sleep(10) # avoid rate limit\n print(f\"Following: {i}\")\n except:\n pass\n\n" }, { "alpha_fraction": 0.5927152037620544, "alphanum_fraction": 0.5993377566337585, "avg_line_length": 24.16666603088379, "blob_id": "5d9c5889438ee0ebd4108462744ea3a4d70ccff2", "content_id": "8b6b7b5a6e8cdcd0d2a68416138d9f36c5772dd3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 302, "license_type": "no_license", "max_line_length": 68, "num_lines": 12, "path": "/parser.sh", "repo_name": "MasterChenb0x/stalkerbot", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n#Strip \".txt\" from the filenames for better data mining later\nfor x in $( ls ); do\n\tFRIEND=`basename $x .txt`\n\tcp $x $FRIEND\ndone\n\n#For every file, strip any part of the string that is NOT a Tweet ID\nfor a in $( ls ); do\n\tcat $a | tr -c [0-9] ' ' | tr -s ' ' | tr ' ' '\\n' > $a\"_new\"\ndone\n" }, { "alpha_fraction": 0.7524116039276123, "alphanum_fraction": 0.7524116039276123, "avg_line_length": 37.875, "blob_id": "48bc7d6ee09980abe70440b8068c473b17ac4789", "content_id": "fd8e983c545877358b7a54798a97df5c751c82f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 311, "license_type": "no_license", "max_line_length": 121, "num_lines": 8, "path": "/tweet_resolve_ids.sh", "repo_name": "MasterChenb0x/stalkerbot", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n#This script simply resolves the twitter IDs gathered from previous scripts and turns them into actual Twitter usernames.\n#Making data collection slightly more human readable.\nfor a in $( ls /home/chen/target_friends/ );\ndo\n\tlinks -dump \"https://twitter.com/intent/user?user_id=$a\" | grep \"@\"\ndone\n" }, { "alpha_fraction": 0.7219178080558777, "alphanum_fraction": 0.7465753555297852, "avg_line_length": 32.181819915771484, "blob_id": "e4eba9c6e76f25c55f7d4a7d02298f112dc3cb6d", "content_id": "7014327b96d87af611dc5cc0398871708905fb1f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 730, "license_type": "no_license", "max_line_length": 257, "num_lines": 22, "path": "/README.md", "repo_name": "MasterChenb0x/stalkerbot", "src_encoding": "UTF-8", "text": "# StalkerBot\n\nDo not follow your target directly. Instead follow their followers and analyze social interaction without arousing suspicion from your target. What about their friends? Friends implies mutual following. 
You'd scrape their friends along with their followers.\n\n\nby MasterChen\n-------------\n7/11/2021: Added python script\n7/28/2015: Added sanitized files in preparation for DC Skytalks\n\n\n\nOrder of execution\n==================\n1. grabber.rb OR stalkerbot.py\n2. parser.sh\n3. tweet_scrape_target.sh\n4. tweet__resolve_ids.sh\n\nDISCLAIMER\n==================\nThis project is not to condone cyberstalking, but rather to promote awareness of such capabilities and techniques. Do NOT be a trash human being. Stalking is NOT ok.\n" } ]
6
LucaCappelletti94/compress_json
https://github.com/LucaCappelletti94/compress_json
6ce9c90a6aa3aae5140d330d93b00bc39f7733f8
9c2ff35ba893607101ed0ae506ee72a45b2cecca
467a1e0d0f7a0cfdb998b964c5cde275c859e857
refs/heads/master
2023-02-07T14:55:43.193994
2023-01-30T16:32:15
2023-01-30T16:32:15
208,207,285
16
3
null
null
null
null
null
[ { "alpha_fraction": 0.7333333492279053, "alphanum_fraction": 0.7333333492279053, "avg_line_length": 39.71428680419922, "blob_id": "e3c3e7c1dfb224d3119e160fec451a6e7a3aadc4", "content_id": "77b4c254a888b846226f16cabc6954dd047288de", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 285, "license_type": "permissive", "max_line_length": 92, "num_lines": 7, "path": "/compress_json/__init__.py", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "\"\"\"Compress JSON is a package to handle reading and writing of compressed JSON documents.\"\"\"\nfrom .compress_json import load, dump, local_load, local_dump\nfrom support_developer import support_luca\n\nsupport_luca(\"compress_json\")\n\n__all__ = [\"load\", \"dump\", \"local_load\", \"local_dump\"]\n" }, { "alpha_fraction": 0.7228464484214783, "alphanum_fraction": 0.7228464484214783, "avg_line_length": 37.14285659790039, "blob_id": "35ab94588cc6be43a86905839cee1846c0792f9d", "content_id": "4362c8485ebaa5c66bf6cacc92cf81aaae9e3bea", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 267, "license_type": "permissive", "max_line_length": 55, "num_lines": 7, "path": "/tests/test_version.py", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "\"\"\"Test that version code is correct.\"\"\"\nfrom validate_version_code import validate_version_code\nfrom compress_json.__version__ import __version__\n\ndef test_version():\n \"\"\"Tests whether the version code is correct.\"\"\"\n assert validate_version_code(__version__)\n" }, { "alpha_fraction": 0.7022653818130493, "alphanum_fraction": 0.7022653818130493, "avg_line_length": 33.33333206176758, "blob_id": "bc3c8df93d3b310c85fc4e4b20049f9e58b1e0e7", "content_id": "672c77b92fcdfa0666bc989331b8b46056b66227", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 309, "license_type": "permissive", "max_line_length": 71, "num_lines": 9, "path": "/tests/test_local_path.py", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "\"\"\"Test if the local path feature is working as expected.\"\"\"\nimport os\nfrom .utils import local_call\n\n\ndef test_local_path():\n \"\"\"Test whether the local path feature is working as expected.\"\"\"\n target = os.sep.join(\"/compress_json/tests/object.json\".split(\"/\"))\n assert local_call().endswith(target)\n" }, { "alpha_fraction": 0.6598984599113464, "alphanum_fraction": 0.6598984599113464, "avg_line_length": 25.266666412353516, "blob_id": "e2e79129816fd71d071d243107609935ab8073cd", "content_id": "f2e02f6ce86fb318d396c58226c89040ed6d3fa6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 394, "license_type": "permissive", "max_line_length": 80, "num_lines": 15, "path": "/tests/test_wrong_path.py", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "\"\"\"Test behaviour on wrong parameters.\"\"\"\nimport pytest\nimport compress_json\n\n\ndef test_wrong_path():\n \"\"\"In this test we check whether the method correctly rises an exception.\"\"\"\n with pytest.raises(ValueError):\n compress_json.load({})\n\n with pytest.raises(ValueError):\n compress_json.dump({}, {})\n\n with pytest.raises(ValueError):\n compress_json.dump(\"\", {})\n" }, { "alpha_fraction": 0.6747337579727173, "alphanum_fraction": 
0.6960309743881226, "avg_line_length": 35.89285659790039, "blob_id": "9d58ece5bd7d28c8aedfbcd0c03bc60f74c10dbc", "content_id": "b922c1c7af9a4aa6bf1f6ced443ea5471044ba9d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1033, "license_type": "permissive", "max_line_length": 85, "num_lines": 28, "path": "/tests/test_compress_json.py", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "\"\"\"Tests whether all compression algorithms work on a random dictionary imput.\"\"\"\nimport shutil\n\nfrom dict_hash import sha256\nfrom random_dict import random_string_dict\nimport compress_json\n\n\ndef test_compress_json():\n \"\"\"Tests whether all compression algorithms work on a random dictionary imput.\"\"\"\n dictionary = random_string_dict(10, 10)\n key = sha256(dictionary)\n extensions = compress_json.compress_json._DEFAULT_EXTENSION_MAP.keys()\n for ext in extensions:\n path = f\"random_dirs/test.json.{ext}\"\n compress_json.dump(dictionary, path)\n assert key == sha256(compress_json.load(path))\n assert key == sha256(compress_json.load(path, use_cache=True))\n\n shutil.rmtree(\"random_dirs\")\n\n for ext in extensions:\n path = f\"random_dirs/test.json.{ext}\"\n compress_json.local_dump(dictionary, path)\n assert key == sha256(compress_json.local_load(path))\n assert key == sha256(compress_json.local_load(path, use_cache=True))\n\n shutil.rmtree(\"tests/random_dirs\")\n" }, { "alpha_fraction": 0.6545454263687134, "alphanum_fraction": 0.6545454263687134, "avg_line_length": 17.66666603088379, "blob_id": "5f4a04f8bdc0ace638540652821dc85342f0013b", "content_id": "2312318e71f9ae05dde7e46e45038f604b05d69a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 55, "license_type": "permissive", "max_line_length": 29, "num_lines": 3, "path": "/tests/utils/__init__.py", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "from .utils import local_call\n\n__all__ = [\"local_call\"]" }, { "alpha_fraction": 0.7547169923782349, "alphanum_fraction": 0.7547169923782349, "avg_line_length": 25.75, "blob_id": "24b5f518462b6f24388bfb39e60fb5a353201166", "content_id": "f8cca8d5cc4e8123cd4a6ecc0898b89123bea1be", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 106, "license_type": "permissive", "max_line_length": 50, "num_lines": 4, "path": "/tests/utils/utils.py", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "from compress_json.compress_json import local_path\n\ndef local_call():\n return local_path(\"object.json\")" }, { "alpha_fraction": 0.5991042852401733, "alphanum_fraction": 0.6018603444099426, "avg_line_length": 27.739273071289062, "blob_id": "572fa01985bd0d6425a0dd76b50e9cce5615d1b6", "content_id": "82b0041f94db9064c666d9d04f90a66e03a11c6c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8708, "license_type": "permissive", "max_line_length": 95, "num_lines": 303, "path": "/compress_json/compress_json.py", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nA thin wrapper of standard ``json`` with standard compression libraries.\n\"\"\"\nimport json\nfrom typing import Dict, Any, Optional\nimport traceback\nimport os\n\n__all__ = [\n \"dump\",\n \"load\",\n \"local_dump\",\n 
\"local_load\"\n]\n\n_DEFAULT_EXTENSION_MAP = {\n \"json\": \"json\",\n \"gz\": \"gzip\",\n \"bz\": \"bz2\",\n \"lzma\": \"lzma\"\n}\n\n_DEFAULT_COMPRESSION_WRITE_MODES = {\n \"json\": \"w\",\n \"gzip\": \"wt\",\n \"bz2\": \"wt\",\n \"lzma\": \"wt\"\n}\n\n_DEFAULT_COMPRESSION_READ_MODES = {\n \"json\": \"r\",\n \"gzip\": \"rt\",\n \"bz2\": \"rt\",\n \"lzma\": \"rt\"\n}\n\n_CACHE = {}\n\n\ndef get_compression_write_mode(compression: str) -> str:\n \"\"\"Return mode for opening file buffer for writing.\n\n Parameters\n -------------------\n compression: str\n The extension of the compression to be used.\n\n Returns\n -------------------\n The code to use for opening the file in write mode.\n \"\"\"\n return _DEFAULT_COMPRESSION_WRITE_MODES[compression]\n\n\ndef get_compression_read_mode(compression: str) -> str:\n \"\"\"Return mode for opening file buffer for reading.\n\n Parameters\n -------------------\n compression: str\n The extension of the compression to be used.\n\n Returns\n -------------------\n The code to use for opening the file in read mode.\n \"\"\"\n return _DEFAULT_COMPRESSION_READ_MODES[compression]\n\n\ndef infer_compression_from_filename(filename: str) -> str:\n \"\"\"Return the compression protocal inferred from given filename.\n\n Parameters\n ----------\n filename: str\n The filename for which to infer the compression protocol\n \"\"\"\n return _DEFAULT_EXTENSION_MAP[filename.split(\".\")[-1]]\n\n\ndef dump(\n obj: Any,\n path: str,\n compression_kwargs: Optional[Dict] = None,\n json_kwargs: Optional[Dict] = None,\n encoding: str = \"utf-8\",\n):\n \"\"\"Dump the contents of an object to disk as json using the detected compression protocol.\n\n Parameters\n ----------------\n obj: any\n The object that will be saved to disk\n path: str\n The path to the file to which to dump ``obj``\n compression_kwargs: Optional[Dict] = None\n Keywords argument to pass to the compressed file opening protocol.\n json_kwargs: Optional[Dict] = None\n Keywords argument to pass to the json file opening protocol.\n encoding: str = \"utf-8\"\n The encoding to use to dump the document. By default, UTF8.\n\n Raises\n ----------------\n ValueError\n If given path is not a valid string.\n \"\"\"\n if not isinstance(path, str):\n if isinstance(obj, str):\n raise ValueError(\n (\n \"The object you have provided to the dump method is a string \"\n \"while the object you have provided as a path is NOT a string \"\n \"but an object of type {}. 
Maybe you need to swap them?\"\n ).format(type(path))\n )\n raise ValueError(\"The given path is not a string.\")\n \n compression_kwargs = {} if compression_kwargs is None else compression_kwargs\n json_kwargs = {} if json_kwargs is None else json_kwargs\n compression = infer_compression_from_filename(path)\n mode = get_compression_write_mode(compression)\n\n directory = os.path.dirname(path)\n if directory:\n os.makedirs(directory, exist_ok=True)\n\n if compression is None or compression == \"json\":\n fout = open(path, mode=mode, encoding=encoding, **compression_kwargs)\n elif compression == \"gzip\":\n import gzip\n\n fout = gzip.open(path, mode=mode, encoding=encoding,\n **compression_kwargs)\n elif compression == \"bz2\":\n import bz2\n\n fout = bz2.open(path, mode=mode, encoding=encoding,\n **compression_kwargs)\n elif compression == \"lzma\":\n import lzma\n\n fout = lzma.open(path, mode=mode, encoding=encoding,\n **compression_kwargs)\n with fout:\n json.dump(obj, fout, **json_kwargs)\n\n\ndef load(\n path: str,\n compression_kwargs: Optional[Dict] = None,\n json_kwargs: Optional[Dict] = None,\n encoding: str = \"utf-8\",\n use_cache: bool = False\n):\n \"\"\"Return json object at given path uncompressed with detected compression protocol.\n\n Parameters\n ----------\n path: str\n The path to the file from which to load the ``obj``\n compression_kwargs: Optional[Dict] = None\n Keywords argument to pass to the compressed file opening protocol.\n json_kwargs: Optional[Dict] = None\n Keywords argument to pass to the json file opening protocol.\n encoding: str = \"utf-8\"\n The encoding to use to load the document. By default, UTF8.\n use_cache: bool = False\n Whether to put loaded JSON files in a static cache object.\n\n Raises\n ----------------\n ValueError\n If given path is not a valid string.\n \"\"\"\n if not isinstance(path, str):\n raise ValueError(\"The given path is not a string.\")\n\n if use_cache and path in _CACHE:\n return _CACHE[path]\n\n compression_kwargs = {} if compression_kwargs is None else compression_kwargs\n json_kwargs = {} if json_kwargs is None else json_kwargs\n compression = infer_compression_from_filename(path)\n mode = get_compression_read_mode(compression)\n\n if compression is None or compression == \"json\":\n file = open(path, mode=mode, encoding=encoding, **compression_kwargs)\n elif compression == \"gzip\":\n import gzip\n file = gzip.open(path, mode=mode, encoding=encoding,\n **compression_kwargs)\n elif compression == \"bz2\":\n import bz2\n file = bz2.open(path, mode=mode, encoding=encoding,\n **compression_kwargs)\n elif compression == \"lzma\":\n import lzma\n file = lzma.open(path, mode=mode, encoding=encoding,\n **compression_kwargs)\n with file:\n json_content = json.load(file, **json_kwargs)\n\n if use_cache:\n _CACHE[path] = json_content\n\n return json_content\n\n\ndef local_path(relative_path: str) -> str:\n \"\"\"Return path localized to caller function.\n\n Parameters\n -----------------------\n relative_path: str\n The path to be made absolute to the caller function.\n\n Returns\n -----------------------\n The absolute path with as root the caller function.\n \"\"\"\n return os.path.join(\n os.path.dirname(\n os.path.abspath(\n traceback.extract_stack()[-3].filename\n )\n ),\n relative_path\n )\n\n\ndef local_load(\n path: str,\n compression_kwargs: Optional[Dict] = None,\n json_kwargs: Optional[Dict] = None,\n encoding: str = \"utf-8\",\n use_cache: bool = False\n) -> Any:\n \"\"\"Return json object at given local path 
uncompressed with detected compression protocol.\n\n Parameters\n ----------\n path: str\n The path to the local file from which to load the ``obj``\n compression_kwargs: Optional[Dict] = None\n keywords argument to pass to the compressed file opening protocol.\n json_kwargs: Optional[Dict] = None\n keywords argument to pass to the json file opening protocol.\n encoding: str = \"utf-8\"\n The encoding to use to load the document. By default, UTF8.\n use_cache: bool = False\n Whether to put loaded JSON files in a static cache object.\n\n Raises\n ----------------\n ValueError,\n If given path is not a valid string.\n \"\"\"\n return load(\n pah=local_path(path),\n compression_kwargs=compression_kwargs,\n json_kwargs=json_kwargs,\n encoding=encoding,\n use_cache=use_cache\n )\n\n\ndef local_dump(\n obj: Any,\n path: str,\n compression_kwargs: Optional[Dict] = None,\n json_kwargs: Optional[Dict] = None,\n encoding: str = \"utf-8\",\n):\n \"\"\"Dump the contents of an object to disk as json, using the detected compression protocol.\n\n Parameters\n ----------\n obj: Any\n The object that will be saved to disk\n path: str\n The local path to the file to which to dump ``obj``\n compression_kwargs: Optional[Dict] = None\n keywords argument to pass to the compressed file opening protocol.\n json_kwargs: Optional[Dict] = None\n keywords argument to pass to the json file opening protocol.\n encoding: str = \"utf-8\"\n The encoding to use to dump the document. By default, UTF8.\n\n\n Raises\n ----------------\n ValueError\n If given path is not a valid string.\n \"\"\"\n dump(\n obj,\n path=local_path(path),\n compression_kwargs=compression_kwargs,\n json_kwargs=json_kwargs,\n encoding=encoding\n )\n" }, { "alpha_fraction": 0.6237799525260925, "alphanum_fraction": 0.6273291707038879, "avg_line_length": 33.14393997192383, "blob_id": "713b108a9b501c560e63c398929c18a63eb8cc7d", "content_id": "8af5ca71f34c67849b52bb1ead7e14a5979222a0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 4508, "license_type": "permissive", "max_line_length": 239, "num_lines": 132, "path": "/README.rst", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "compress_json\n=========================================================================================\n|pip| |downloads|\n\nThe missing Python utility to read and write large compressed JSONs.\n\nThe library is loosely based on the `compress_pickle <https://github.com/lucianopaz/compress_pickle>`_ library.\n\nHow do I install this package?\n----------------------------------------------\nAs usual, just download it using pip:\n\n.. code:: shell\n\n pip install compress_json\n\nAvailable compression modes\n----------------------------------------------\nThe compression modes, detected automatically by the file name, are **gzip**, **bz2** and **lzma**,\nwith the notable exception of **zip** which seems difficult to integrate in the JSON pipeline.\n\nUsage example\n----------------------------------------------\nThe library is extremely easy to use:\n\n.. 
code:: python\n\n import compress_json\n \n D = {\n \"A\":{\n \"B\":\"C\"\n }\n }\n compress_json.dump(D, \"filepath.json.gz\") # for a gzip file\n compress_json.dump(D, \"filepath.json.bz\") # for a bz2 file\n compress_json.dump(D, \"filepath.json.lzma\") # for a lzma file\n\n D1 = compress_json.load(\"filepath.json.gz\") # for loading a gzip file\n D2 = compress_json.load(\"filepath.json.bz\") # for loading a bz2 file\n D3 = compress_json.load(\"filepath.json.lzma\") # for loading a lzma file\n\n\nSome extra perks: local loading and dumping\n----------------------------------------------\nThe library makes available, other than the usual load and dump from the JSON library, the methods local_load and local_dump, which let you load and dump file in the same directory of wherever you are calling them, by using the call stack.\n\nThis can get useful, especially when loading files within packages.\n\n.. code:: python\n\n import compress_json\n \n D = {\n \"A\": {\n \"B\": \"C\"\n }\n }\n compress_json.local_dump(D, \"filepath.json.gz\") # for a gzip file\n compress_json.local_dump(D, \"filepath.json.bz\") # for a bz2 file\n compress_json.local_dump(D, \"filepath.json.lzma\") # for a lzma file\n\n D1 = compress_json.local_load(\"filepath.json.gz\") # for loading a gzip file\n D2 = compress_json.local_load(\"filepath.json.bz\") # for loading a bz2 file\n D3 = compress_json.local_load(\"filepath.json.lzma\") # for loading a lzma file\n\nLoading with RAM cache\n----------------------------------------------\nSometimes you need to load a compressed JSON file a LOT of times, and you may want to\nput this document in a cache or something of the sorts. Fortunately, we already provide\nthis option for you:\n\n.. code:: python\n\n import compress_json\n \n D1 = compress_json.load(\n \"filepath.json.gz\",\n use_cache=True\n )\n\n D1 = compress_json.local_load(\n \"filepath.json.gz\",\n use_cache=True\n )\n\nAdvanced usage\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\nYou can pass parameters to either the chosen compression mode or the JSON library.\n\nWith the :code:`json_kwargs` parameter you can specify any of the kwargs that should\nbe forwarded to the JSON library method, which you can obtain for your Python version\nby running :code:`help(json.dump)` and :code:`help(json.load)`, depending whether you are\ndumping or loading the json object.\n\nAnalogously, with the :code:`compression_kwargs` parameter you can specify any parameter that\nhas to be forwarded to the compression library that you intend to use, whether that is\n`lzma`, :code:`gzip` or :code:`bz2`, and as per JSON will depend on which version you have installed.\n\nWhether you are dumping or loading a compressed JSON object, you can get the list of parameters you\nhave available to forward to the compression method by running :code:`help(lzma.open)`, :code:`help(gzip.open)`\nor :code:`help(bz2.open)`, respectively.\n\n.. code:: python\n\n import compress_json\n \n D = {\n \"A\": {\n \"B\": \"C\"\n }\n }\n compress_json.dump(\n D, \"filepath.json.gz\",\n compression_kwargs = {kwargs go here},\n json_kwargs = {kwargs go here}\n )\n\n D4 = compress_json.load(\n \"filepath.json.gz\",\n compression_kwargs = {kwargs go here},\n json_kwargs = {kwargs go here}\n )\n\n\n.. |pip| image:: https://badge.fury.io/py/compress-json.svg\n :target: https://badge.fury.io/py/compress-json\n :alt: Pypi project\n\n.. 
|downloads| image:: https://pepy.tech/badge/compress-json\n :target: https://pepy.tech/badge/compress-json\n :alt: Pypi total project downloads \n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6428571343421936, "avg_line_length": 34, "blob_id": "1f5447c156f081fa135d38a21900fe2f616b4a95", "content_id": "0cea015fda350c83603727992f4017c690a2fbad", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 70, "license_type": "permissive", "max_line_length": 47, "num_lines": 2, "path": "/compress_json/__version__.py", "repo_name": "LucaCappelletti94/compress_json", "src_encoding": "UTF-8", "text": "\"\"\"Current version of package compress_json.\"\"\"\n__version__ = \"1.0.8\"\n" } ]
10
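To make the compress_json usage documented in the README.rst of the record above concrete, here is a minimal, self-contained sketch. It uses only calls that the README itself documents (dump/load with compression_kwargs and json_kwargs, plus the use_cache flag); the file name and the specific kwargs values (gzip compresslevel, json indent) are illustrative assumptions, not values taken from the repository.

```python
# Minimal round-trip sketch of the compress_json API documented above.
# The file name and the kwargs values are arbitrary, for illustration only.
import compress_json

data = {"A": {"B": "C"}}

# The ".gz" suffix selects the gzip protocol automatically.
compress_json.dump(
    data,
    "example.json.gz",
    compression_kwargs={"compresslevel": 9},  # forwarded to gzip.open
    json_kwargs={"indent": 2},                # forwarded to json.dump
)

# With use_cache=True, repeated loads reuse the in-memory copy.
first = compress_json.load("example.json.gz", use_cache=True)
second = compress_json.load("example.json.gz", use_cache=True)
assert first == second == data
```

The same round trip works with local_dump/local_load when, as the README explains, the path should be resolved relative to the calling module rather than the current working directory.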
natasha41575/CS131
https://github.com/natasha41575/CS131
9eeba93be31de92f286af9e5e009425c83e7d0fe
ed0212e4979d4bd7b6721b7e5e58dd692b85e02b
a8136a079ae5bff1f49cef1dadcbd92b7f0cd61c
refs/heads/master
2022-01-22T11:15:12.385535
2019-07-03T03:49:33
2019-07-03T03:49:33
194,974,889
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.496515691280365, "alphanum_fraction": 0.5156794190406799, "avg_line_length": 32.764705657958984, "blob_id": "58b7e40393f113fcfbde7cb9fa3f2af31dda9297", "content_id": "d6acb7432414716e6c0fd85a1e47693edf1b86ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1148, "license_type": "no_license", "max_line_length": 69, "num_lines": 34, "path": "/project/Test.java", "repo_name": "natasha41575/CS131", "src_encoding": "UTF-8", "text": "// driver class \npublic class Test \n{ \n public static void main(String args[]) \n { \n // using superclass reference \n // first approach \n Bicycle mb2 = new MountainBike(4, 200, 20); \n \n // using subclass reference( ) \n // second approach \n MountainBike mb1 = new MountainBike(3, 100, 25); \n mb1.setHeight(\"potato\");\n \n System.out.println(\"seat height of first bicycle is \" \n + mb1.seatHeight); \n \n // In case of overridden methods \n // always subclass \n // method will be executed \n System.out.println(mb1.toString()); \n System.out.println(mb2.toString()); \n \n /* The following statement is invalid because Bicycle \n does not define a seatHeight. \n // System.out.println(\"seat height of second bicycle is \" \n + mb2.seatHeight); */\n \n /* The following statement is invalid because Bicycle \n does not define setHeight() method. \n mb2.setHeight(21);*/\n \n } \n} " }, { "alpha_fraction": 0.6337448358535767, "alphanum_fraction": 0.6748971343040466, "avg_line_length": 12.5, "blob_id": "d10f6250e5b0b23786d9c5b475e5d9dfb694f64f", "content_id": "a5534395bb8f97e7119290293fc199453ce09a15", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 243, "license_type": "no_license", "max_line_length": 53, "num_lines": 18, "path": "/hw6/Makefile", "repo_name": "natasha41575/CS131", "src_encoding": "UTF-8", "text": "# Name:\t\tNatasha Sarkar\n# # Email: \[email protected]\n# # ID: \t904743795\n#\n#\n#\n\ndefault:\n\tkotlinc everyNth.kt -include-runtime -d everyNth.jar\n\ncheck: default\n\tkotlin everyNth.jar\n\nclean:\n\trm -f everyNth.jar\n\ndist:\n\ttar -cvzf hw6.tar.gz Makefile everyNth.kt\n" }, { "alpha_fraction": 0.6526104211807251, "alphanum_fraction": 0.6596385836601257, "avg_line_length": 17.79245376586914, "blob_id": "5176530e08821c87bac7f5408330e5e81c1fb646", "content_id": "2fcc4f2d37e261f88d8586b1a55e10764950f965", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 996, "license_type": "no_license", "max_line_length": 63, "num_lines": 53, "path": "/hw3/BetterSafe.java", "repo_name": "natasha41575/CS131", "src_encoding": "UTF-8", "text": "/* Design and implement a new class BetterSafe of your choice, \n * which achieves better performance than Synchronized while \n * retaining 100% reliability.\n */\n\nimport java.util.concurrent.locks.ReentrantLock;\nimport java.util.concurrent.locks.Lock;\n\nclass BetterSafe implements State {\n\tprivate byte[] value;\n\tprivate byte maxval;\n\tprivate Lock rlock;\n\n\t/* constructors */\n\n\tBetterSafe(byte[] v) {\n\t\tthis.value = v;\n\t\tthis.maxval = 127;\n\t\tthis.rlock = new ReentrantLock();\n\t}\n\n\tBetterSafe(byte[] v, byte m) {\n\t\tthis.value = v;\n\t\tthis.maxval = m;\n\t\tthis.rlock = new ReentrantLock();\n\t}\n\n\t/* methods */\n\n\t@Override\n\tpublic int size() {\n\t\treturn this.value.length;\n\t}\n\n\t@Override\n\tpublic byte[] current() {\n\t\treturn this.value;\n\t}\n\n\t@Override\n\tpublic boolean swap(int i, 
int j) {\n\t\trlock.lock();\n\t\tif (this.value[i] <= 0 || this.value[j] >= this.maxval) {\n\t\t\tthis.rlock.unlock();\n\t\t\treturn false;\n\t\t} else {\n\t\t\tthis.value[i]--;\n\t\t\tthis.value[j]++;\n\t\t\tthis.rlock.unlock();\n\t\t\treturn true;\n\t\t}\n\t}\n}\n" }, { "alpha_fraction": 0.3435225486755371, "alphanum_fraction": 0.6754003167152405, "avg_line_length": 37.22222137451172, "blob_id": "59425b76ade5ab2ce4a3f931cd9b10edbf3db608", "content_id": "243fad66002ac703daaff5968de6bdd50b8fe228", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 687, "license_type": "no_license", "max_line_length": 369, "num_lines": 18, "path": "/hw3/tests.sh", "repo_name": "natasha41575/CS131", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nmax_int=127\ntransitions=1000000\nmodels=(Null Synchronized Unsynchronized GetNSet BetterSafe)\nthreads=(8 16 32)\n\necho -e \"Using\" $transitions \"transitions\\n\"\n\nfor model in ${models[*]}\ndo\n for thread in ${threads[*]}\n do\n echo $model \"model with\" $thread \"threads:\"\n java UnsafeMemory $model $thread $transitions 127 32 80 25 10 53 125 109 53 109 86 87 44 124 86 125 2 17 123 67 126 63 8 126 10 18 46 9 9 67 124 2 34 26 119 8 97 53 12 6 30 34 10 90 112 118 126 16 75 55 14 1 4 44 38 108 44 86 114 66 109 114 47 91 61 127 86 46 18 108 45 85 34 36 96 32 88 19 78 50 87 44 8 102 48 47 60 117 70 99 61 51 50 110 4 40 19 116 58 2 0 \n echo\n done\ndone" }, { "alpha_fraction": 0.5845845937728882, "alphanum_fraction": 0.6136136054992676, "avg_line_length": 23.975000381469727, "blob_id": "bf31e4ade1cd8a10275495b55e723284f4235bce", "content_id": "6e5d4714c7b5bc8adbf0cd6bf8b7dc80d970ed6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 999, "license_type": "no_license", "max_line_length": 65, "num_lines": 40, "path": "/hw6/everyNth.kt", "repo_name": "natasha41575/CS131", "src_encoding": "UTF-8", "text": "fun <T: Any> everyNth(L: List<T>, N: Int): List<T> {\n\tif (N == 0) {\n\t\treturn listOf<T>()\n\t}\n\n\t@Suppress(\"UNCHECKED_CAST\")\n\tvar array: Array<T> = arrayOfNulls<Any?>(L.size / N) as Array<T>\n\tvar x = 0\n\tfor (i in N-1..(L.size-1) step N) {\n\t\tarray.set(x, L.get(i))\n\t\tx = x + 1\n\t}\n\t\n\treturn array.toList()\n}\n\n\nfun main(args : Array<String>) {\n\tvar list = listOf<Int>()\n\tassert( (listOf<Int>()).equals(everyNth(list, 0)) )\n\tprintln(\"TEST 1 SUCCESS\")\n\n\tlist = listOf<Int>(1)\n assert( (listOf<Int>()).equals(everyNth(list, 5)) )\n println(\"TEST 2 SUCCESS\")\n\n\tlist = listOf<Int>(1, 2, 3, 4)\n\tassert( (listOf<Int>(2,4)).equals(everyNth(list, 2)) )\n \tprintln(\"TEST 3 SUCCESS\")\n\n\tvar list2 = listOf<String>(\"potato\", \"rotato\", \"totato\")\n\tassert( (listOf<String>(\"rotato\")).equals(everyNth(list2, 2)) )\n\tprintln(\"TEST 4 SUCCESS\")\n\n\tassert( (listOf<String>(\"totato\")).equals(everyNth(list2, 3)) )\n\tprintln(\"TEST 5 SUCCESS\")\n\n\tassert( (list2).equals(everyNth(list2, 1)) )\n\tprintln(\"TEST 6 SUCCESS\")\n}\n" }, { "alpha_fraction": 0.6176878809928894, "alphanum_fraction": 0.6297109723091125, "avg_line_length": 26.460317611694336, "blob_id": "9d3b38f54118dd87a978c34f855e03d82678dc3a", "content_id": "37a2d18eee80e386ecc278332e0ae57eca7fb412", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8650, "license_type": "no_license", "max_line_length": 179, "num_lines": 315, "path": "/project/server.py", "repo_name": "natasha41575/CS131", 
"src_encoding": "UTF-8", "text": "import sys\nimport asyncio\nimport aiohttp\nimport async_timeout\nimport json\nimport time\n\nmy_api_key = 'AIzaSyD3OvCxDesRz2k1IOsmI0Ex6i_79m99WPQ'\n\nports = {\n\t'Goloman': 12275,\n\t'Hands': 12276,\n\t'Holiday': 12277,\n\t'Welsh': 12278,\n\t'Wilkes': 12279\n}\n\nconnection_dict = {\n 'Goloman': ['Hands', 'Holiday', 'Wilkes'],\n 'Hands': ['Goloman', 'Wilkes'],\n 'Holiday': ['Goloman', 'Welsh', 'Wilkes'],\n 'Wilkes': ['Goloman', 'Hands', 'Holiday'],\n 'Welsh': ['Holiday'],\n}\n\n\nclient_info = dict()\nall_items_dict = dict()\n\n\nasync def output_log(message):\t\n\n\tif message != None:\n\t\ttry:\n\t\t\tserver_log.write(message)\n\t\texcept:\n\t\t\tprint('could not log message: ' + message)\n\nasync def send_resp(wrtr, message):\n\tif message != None:\n\t\ttry:\n\t\t\tencoded_message = message.encode()\n\t\t\twrtr.write(encoded_message)\n\t\t\t#print(\"sending response: \" + message + '\\n')\n\t\t\tawait wrtr.drain()\n\t\t\twrtr.write_eof()\n\t\texcept:\n\t\t\tprint('error writing message: ' + message + '\\n')\n\n\nasync def handle_iamat(clntnm, coordinates, time_received, time_sent, wrtr):\n\t\n\tdef get_coordinates(coords):\n\t\tn = 0\n\t\ti = 0\n\t\tfor c in coords:\n\t\t\tif c == '+':\n\t\t\t\tn = n + 1\n\t\t\telif c == '-':\n\t\t\t\tn = n + 1\n\t\t\tif n == 2:\n\t\t\t\ttry:\n\t\t\t\t\treturn coords[:(i-1)], coords[i:]\n\t\t\t\texcept:\n\t\t\t\t\treturn '', ''\n\t\t\ti = i + 1\n\t\treturn '', ''\n\t\n\tlatt, lon = get_coordinates(coordinates)\n\tif latt == '':\n\t\t#print('invalid coordinates')\n\t\tawait output_log('invalid coordinates')\n\t\tprint('? ' + cmd)\n\t\tawait send_resp(wrtr, '? ' + cmd)\n\t\treturn None\n\n\ttime_received = float(time_received)\n\tif clntnm in client_info and client_info[clntnm]['time command received'] > time_received:\n\t\toutput_response = 'AT %s %s %s %s %s' % (srv, str(lag), clntnm, coordinates, str(time_received))\n\t\tawait output_log('sending iamat response: ' + output_response + '\\n')\n\t\tprint(output_response)\n\t\tawait send_resp(wrtr, output_response)\n\t\treturn None\n\n\tlag = time_received - time_sent\n\toutput_response = 'AT %s %s %s %s %s' % (srv, str(lag), clntnm, coordinates, str(time_received))\n\tawait output_log('sending iamat response: ' + output_response + '\\n')\n\tprint(output_response)\n\n\tclient_info[clntnm] = {\n\t\t'server': srv,\n\t\t'time difference': lag,\n\t\t'time command received': time_received,\n\t\t'latitude': latt,\n\t\t'longitude': lon\n\t}\n\n\t#for key in client_info[clntnm]:\n\t\t#print(key + ':', client_info[clntnm][key])\n\t#print('\\n')\n\t\n\tawait propagate(clntnm)\n\tawait send_resp(wrtr, output_response)\n\n\nasync def handle_whatsat(clntnm, radius, num_results, time_received, cmd, wrtr):\n\t#print(\"handling whatsat command\")\n\n\tradius = int(radius) * 1000\n\tresponse = None\n\n\tif radius > 50000:\n\t\tprint('? ' + cmd)\n\t\tawait output_log(\"radius too large\" + '\\n')\n\t\tawait send_resp(wrtr, '? ' + cmd)\n\t\treturn None\n\n\tif int(num_results) > 20:\n\t\tprint('? ' + cmd)\n\t\tawait output_log(\"Information bound too high\" + '\\n')\n\t\tawait send_resp(wrtr, '? ' + cmd)\n\t\treturn None\n\t\n\ttry:\n\t\tlatitude = client_info[clntnm][\"latitude\"]\n\t\tlongitude = client_info[clntnm][\"longitude\"]\n\texcept:\n\t\tawait output_log('no information about client ' + clntnm + '\\n\\n')\n\t\tprint('? ' + cmd)\n\t\tawait send_resp(wrtr, '? 
' + cmd)\n\t\treturn None\n\n\t#print(\"connecting to api with lat, lon, rad: \", latitude, longitude, radius)\n\tawait output_log(\"connecting to api with lat, lon, rad: \" + latitude + ', ' + longitude + ', ' + str(radius) + '\\n')\n\tlocation = latitude + \",\" + longitude\n\turl = 'https://maps.googleapis.com/maps/api/place/nearbysearch/json?key={0}&location={1}&radius={2}'.format(my_api_key, location, str(radius))\n\t\n\tasync with aiohttp.ClientSession() as session:\n\t\tasync with session.get(url) as api_resp:\n\t\t\trs = await api_resp.json()\n\t\t\trs['results'] = rs['results'][:int(num_results)]\n\t\t\tapi_msg = json.dumps(rs, indent=3)\n\n\t\t\toutput_response = 'AT %s %s %s %s %s\\n%s' % (srv, str(client_info[clntnm][\"time difference\"]), clntnm, latitude + longitude, time_received, api_msg)\n\t\t\tx = output_response.split('\\n')\n\t\t\tfor i in x:\n\t\t\t\tif len(i) == 0:\n\t\t\t\t\tx.remove(i)\n\t\t\toutput_response = '\\n'.join(x)\n\t\t\toutput_response = output_response.strip() + '\\n\\n'\n\n\t\t\tawait output_log('sending response: \\n' + output_response)\n\t\t\tprint(output_response)\n\t\t\tawait send_resp(wrtr, output_response)\n\n\nasync def handle_at(clntnm, srv, latt, lon, lag, time_received, wrtr):\n\ttime_received = float(time_received)\n\tif clntnm in client_info:\n\t\tif float(client_info[clntnm]['time command received']) >= time_received: \n\t\t\t#print('received duplicate at command for client ' + clntnm)\n\t\t\tawait output_log('received duplicate at command for client ' + clntnm + '\\n')\n\t\t\treturn\n\n\tclient_info[clntnm] = {\n\t\t'server': srv,\n\t\t'time difference': lag,\n\t\t'time command received': time_received,\n\t\t'latitude': latt,\n\t\t'longitude': lon\n\t}\n\n\tawait propagate(clntnm)\n\n\nasync def propagate(clntnm):\n\tclient = client_info[clntnm]\n\tmy_at_command = 'AT %s %s %s %s %s %s' % (clntnm, client['server'], client['latitude'], client['longitude'], str(client['time difference']), str(client['time command received']))\n\n\t#print(str(client['time difference']), str(client['time command received']))\n\n\tfor cnnctn in connection_dict[srv]:\n\t\tport = ports[cnnctn]\n\t\t#print('propagating ' + clntnm + ' to ' + cnnctn)\n\t\tawait output_log('propagating ' + clntnm + ' to ' + cnnctn)\n\t\t\n\t\ttry:\n\t\t\trdr, wrtr = await asyncio.open_connection('127.0.0.1', port, loop=loop)\n\t\t\tawait output_log('connected successfully\\n')\n\t\t\t#print('connected successfully')\n\t\t\tawait send_resp(wrtr, my_at_command)\n\t\t\tawait output_log('closed connection to ' + cnnctn + '\\n')\n\t\t\n\t\texcept:\n\t\t\t#print('failed to send message to ' + cnnctn)\n\t\t\tawait output_log('failed to send message to ' + cnnctn)\n\n\nasync def get_cmd(wrtr, cmd, token):\n\tif len(token) > 0:\n\t\tcommand_0 = token[0]\n\n\t\tif (cmd != '' and command_0 != 'IAMAT' and command_0 != 'WHATSAT' and command_0 != 'AT'):\n\t\t\tawait output_log('command ' + cmd + 'is invalid\\n')\n\t\t\tawait output_log('? ' + cmd + '\\n')\n\t\t\t#print('command ' + cmd + ' is invalid')\n\t\t\tprint('? ' + cmd)\n\t\t\tawait send_resp(wrtr, '? ' + cmd)\n\t\t\treturn None \n\n\t\tif len(token) > 3:\n\t\t\tawait output_log('received command:' + cmd + '\\n')\n\t\t\t#print('received command:' + cmd)\n\n\t\tcurr_time = time.time()\n\t\tlog_message = 'type: ' + command_0 + '. 
time received: ' + str(curr_time)\n\t\t#print(log_message)\n\t\tawait output_log(log_message + '\\n')\n\n\t\tif command_0 == 'IAMAT':\n\t\t\ttry:\n\t\t\t\tawait handle_iamat(token[1], token[2], token[3], curr_time, wrtr)\n\t\t\texcept:\n\t\t\t\tawait output_log('invalid command' + cmd + '\\n')\n\t\t\t\tprint('? ' + cmd)\n\t\t\t\tawait send_resp(wrtr, '? ' + cmd)\n\t\telif command_0 == 'WHATSAT':\n\t\t\ttry:\n\t\t\t\tawait handle_whatsat(token[1], token[2], token[3], curr_time, cmd, wrtr)\n\t\t\texcept:\n\t\t\t\tawait output_log('invalid command' + cmd + '\\n')\n\t\t\t\tprint('? ' + cmd)\n\t\t\t\tawait send_resp(wrtr, '? ' + cmd)\n\t\telif command_0 == 'AT':\n\t\t\t#print(token)\n\t\t\ttry:\n\t\t\t\tawait handle_at(token[1], token[2], token[3], token[4], token[5], token[6], wrtr)\n\t\t\texcept:\n\t\t\t\tawait output_log('invalid command' + cmd + '\\n')\n\t\t\t\tprint('? ' + cmd)\n\t\t\t\tawait send_resp(wrtr, '? ' + cmd)\n\t\telse:\n\t\t\tawait output_log('invalid command' + cmd + '\\n')\n\t\t\tprint('? ' + cmd)\n\t\t\tawait send_resp(wrtr, '? ' + cmd)\n\n\telse: \n\t\tawait output_log('invalid command' + cmd + '\\n')\n\t\tprint('? ' + cmd)\n\t\tawait send_resp(wrtr, '? ' + cmd)\n\ndef get_cl(rdr, wrtr):\n\tasync def get_cl_h(rdr, wrtr):\n\t\twhile True:\n\t\t\tif rdr.at_eof():\n\t\t\t\tbreak\n\t\t\tp0 = await rdr.read()\n\t\t\tp1 = p0.decode()\n\t\t\tp2 = p1.split()\n\t\t\tawait get_cmd(wrtr, p1, p2)\n\n\titem = asyncio.ensure_future(get_cl_h(rdr, wrtr))\n\tall_items_dict[item] = (rdr, wrtr)\n\n\tdef end_session(item):\n\t\t#print('client is closed')\n\t\tserver_log.write('client is closed\\n')\n\t\t#print(client_info)\n\t\tdel all_items_dict[item]\n\t\twrtr.close()\n \n\titem.add_done_callback(end_session)\n\ndef main():\n\tnum_args = len(sys.argv)\n\tif num_args != 2:\n\t\tprint(\"incorrect number of arguments\")\n\t\texit()\n\n\tglobal srv\n\tsrv = sys.argv[1]\n\n\tif srv not in ports:\n\t\tprint(\"incorrect name of server\")\n\t\texit()\n\n\tglobal server_log\n\tserver_log = open(srv + \"-log.txt\", 'w+')\n\n\tglobal loop\n\tloop = asyncio.get_event_loop()\n\n\t#print(\"using server \" + srv)\n\tserver_log.write(\"using server \" + srv + '\\n')\n\n\tacc_cntn = asyncio.start_server(get_cl, '127.0.0.1', ports[srv], loop=loop)\n\tsvr = loop.run_until_complete(acc_cntn)\n\n\t#print(\"host: \" + '127.0.0.1\\t' + \"port: \" + str(ports[srv]))\n\tserver_log.write(\"host: \" + '127.0.0.1\\t' + \"port: \" + str(ports[srv]) + '\\n\\n')\n\t\n\ttry:\n\t\tloop.run_forever()\n\texcept KeyboardInterrupt:\n\t\tpass\n\t\n\t#print('\\nserver closed\\ttime:', time.time())\n\tserver_log.write('\\nserver closed\\ttime:' + str(time.time()))\n\tsvr.close()\n\tloop.run_until_complete(svr.wait_closed())\n\tloop.close()\n\tserver_log.close()\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.6499999761581421, "avg_line_length": 19, "blob_id": "7698225e68158f5a35c8153b409ac30ea1ee718a", "content_id": "9b6aed1604d06012a3d62ba84fab4d631ef18609", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 20, "license_type": "no_license", "max_line_length": 19, "num_lines": 1, "path": "/README.md", "repo_name": "natasha41575/CS131", "src_encoding": "UTF-8", "text": "# CS131 with Paul Eggert\n" }, { "alpha_fraction": 0.6141666769981384, "alphanum_fraction": 0.6858333349227905, "avg_line_length": 32.33333206176758, "blob_id": "d98ef96916b48657068fc684fb40b61e8000fcba", "content_id": "6dca43cde33fab4f5306152502363c2f16bcfdc7", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1200, "license_type": "no_license", "max_line_length": 134, "num_lines": 36, "path": "/project/client.py", "repo_name": "natasha41575/CS131", "src_encoding": "UTF-8", "text": "import asyncio\nimport time\n\nasync def tcp_echo_client(message, loop):\n\treader, writer = await asyncio.open_connection('127.0.0.1', 12275, loop=loop)\n\t#reader, writer = await asyncio.open_connection('127.0.0.1', 12277, loop=loop)\n\tprint(\"Sending:\", message)\n\twriter.write(message.encode())\n\twriter.write_eof()\n\tdata = await reader.read(100000)\n\tprint(\"Received:\", data.decode())\n\t# writer.close()\n\n\ndef main():\n\t#message = \"\\t\\t\\t\\t\\t \\f\\f\\fIAMAT\\v\\v\\v\\v\\v \\t\\fkiwi.cs.ucla.edu -33.86705222+151.1957 {0}\\f\\r\\f\\f\\t\\t\\r\\n\".format(time.time())\n\t\n\t#loop.run_until_complete(tcp_echo_client(message, loop))\n\n\tmessage = \"IAMAT kiwi.cs.ucla.edu +34.068930-118.44512 {0}\\n\".format(time.time())\n\tloop = asyncio.get_event_loop()\n\tloop.run_until_complete(tcp_echo_client(message, loop))\n\n\n\t# message = \"IAMAT other +34.0698-118.445127 {0}\\n\".format(time.time())\n\t# loop.run_until_complete(tcp_echo_client(message, loop))\n\t\n\tloop = asyncio.get_event_loop()\n\tmessage = \"WHATSAT \\n\\nkiwi.cs.ucla.edu 50 10\\n\"\n\tloop.run_until_complete(tcp_echo_client(message, loop))\n\tloop.close()\n\t# message = \"WHATSAT other 5 20\\n\"\n\t# loop.run_until_complete(tcp_echo_client(message, loop))\n\nif __name__ == '__main__':\n\tmain()\n" } ]
8
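The BetterSafe.java file in the CS131 record above guards a check-then-act swap with a single ReentrantLock. As a rough, hypothetical Python analogue of that same pattern (not part of the repository), the sketch below keeps the bounds check and both updates inside one lock so no other thread can interleave between them.

```python
# Rough Python analogue of the lock-guarded swap in BetterSafe.java above.
# This class is illustrative only; the names mirror the Java version.
import threading


class BetterSafePy:
    def __init__(self, value, maxval=127):
        self.value = value              # mutable list of counters
        self.maxval = maxval            # upper bound for any slot
        self._lock = threading.Lock()

    def swap(self, i, j):
        # Check and update atomically: holding the lock across both steps
        # is what makes the bounds check trustworthy.
        with self._lock:
            if self.value[i] <= 0 or self.value[j] >= self.maxval:
                return False
            self.value[i] -= 1
            self.value[j] += 1
            return True


state = BetterSafePy([10, 10, 10])
print(state.swap(0, 1), state.value)  # True [9, 11, 10]
```

Using a `with` block releases the lock on every exit path, which plays the same role as the explicit unlock() calls in the Java version.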
lukavuko/mortgage-filter-package
https://github.com/lukavuko/mortgage-filter-package
01279aa29af40b2174620fc9cd2eed536e6c08d0
187d771c441f93b6a5dd2c5bf67ee519d1888430
24eec6e1eea4c2a688d340ed61d9fc30b4135fce
refs/heads/main
2023-03-28T17:52:27.302494
2021-04-05T20:15:13
2021-04-05T20:15:13
329,435,441
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.47049281001091003, "alphanum_fraction": 0.48813629150390625, "avg_line_length": 49.29591751098633, "blob_id": "dc074b400e70ac1f9d3d3593e95d5b27875c4f05", "content_id": "7541ef753d3fe0c5f5e71a80d0dc592dbd37286b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4931, "license_type": "permissive", "max_line_length": 95, "num_lines": 98, "path": "/tests/test_filter.py", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding: utf-8\n\nfrom mortgage_filter.mortgage_filter import *\n#from mortgage_package.exceptions import *\n\nimport unittest\n\nclass Test_filter(unittest.TestCase):\n \n @classmethod\n def setUpClass(cls):\n ## Import dependencies\n from pandas import read_csv\n from numpy import linspace \n print('!!! Class setup for Test(Test_filter) !!!')\n \n def setUp(self):\n ## Import property dataset\n self.df = pd.read_csv('data/vancouver_area_testing_set.csv', header = 0)\n self.df = self.df.loc[:,['Area', 'House Price']]\n \n ## Prior knowns for 4 test sets for the property filter \n self.downpayment = np.linspace(100000, 600000, 4)\n self.mortgage_rate = np.linspace(1.6, 3.8, 4)\n self.mortgage_term = np.linspace(1, 10, 4)\n self.max_monthly_payment = np.linspace(1500, 11000, 4)\n self.max_loan = np.linspace(100000, 600000, 4)/0.05 \n \n print('~~~ start test ~~~')\n \n \n def test_property_filter(self):\n self.expected1 = ['East Burnaby', 'North Vancouver', 'North Vancouver']\n self.expected2 = ['East Burnaby', 'North Vancouver', 'West Vancouver']\n \n ## Test first case to ensure returned dataframe is empty as expected.\n self.assertTrue(property_filter(property_data = self.df,\n downpayment = self.downpayment[0],\n mortgage_rate = self.mortgage_rate[0],\n max_monthly_payment = self.max_monthly_payment[0],\n max_loan = self.max_loan[0]).empty) \n self.assertTrue(property_filter(property_data = self.df,\n downpayment = self.downpayment[0],\n mortgage_term = self.mortgage_term[0],\n max_monthly_payment = self.max_monthly_payment[0],\n max_loan = self.max_loan[0]).empty)\n \n ## Test to ensure handling of bad input objects\n ## Dataframe has wrong column type for price\n self.assertIsNone(property_filter(property_data = self.df.astype('string'),\n downpayment = self.downpayment[0],\n mortgage_term = self.mortgage_term[0],\n max_monthly_payment = self.max_monthly_payment[0],\n max_loan = self.max_loan[0]))\n ## Non dataframe object given\n self.assertIsNone(property_filter(property_data = 'NOT a dataframe object',\n downpayment = self.downpayment[0],\n mortgage_term = self.mortgage_term[0],\n max_monthly_payment = self.max_monthly_payment[0],\n max_loan = self.max_loan[0]))\n ## Dataframe doesnt have 2 columns (must have 2)\n self.x = self.df.drop('Area', axis=1)\n self.assertIsNone(property_filter(property_data = self.x,\n downpayment = self.downpayment[0],\n mortgage_term = self.mortgage_term[0],\n max_monthly_payment = self.max_monthly_payment[0],\n max_loan = self.max_loan[0]))\n \n ## Test last three cases where return is a non empty dataframe\n for i in range(1, 4):\n \n ## Testing with known mortgage rate\n self.assertEqual((property_filter(property_data = self.df,\n downpayment = self.downpayment[i],\n mortgage_rate = self.mortgage_rate[i],\n max_monthly_payment = self.max_monthly_payment[i],\n max_loan = self.max_loan[i]).iloc[0, 0]), \n self.expected1[i-1]) \n\n ## Testing with unknown mortgage rate\n 
self.assertEqual(property_filter(property_data = self.df,\n downpayment = self.downpayment[i],\n mortgage_term = self.mortgage_term[i],\n max_monthly_payment = self.max_monthly_payment[i],\n max_loan = self.max_loan[i]).iloc[0, 0], \n self.expected2[i-1])\n\n \n def tearDown(self):\n print('~~~ complete ~~~') \n \n @classmethod\n def tearDownClass(cls):\n print('!!! Class teardown for Test(Test_filter) !!!\\n\\n\\n')\n\n\n#unittest.main(argv=[''], verbosity=2, exit=False)\n\n\n" }, { "alpha_fraction": 0.7060301303863525, "alphanum_fraction": 0.7085427045822144, "avg_line_length": 21.11111068725586, "blob_id": "a9187a656d58548d42fe5ccf7f0c442b2fca4640", "content_id": "cf060f90a610b1ed405e8f87ab761ba971b2e7f3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 398, "license_type": "permissive", "max_line_length": 58, "num_lines": 18, "path": "/TEST_SUITE.py", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding: utf-8\n\nimport unittest\nfrom tests.test_base import *\nfrom tests.test_filter import *\n\ndef my_suite():\n suite = unittest.TestSuite()\n result = unittest.TestResult()\n \n suite.addTest(unittest.makeSuite(Test_base_functions))\n suite.addTest(unittest.makeSuite(Test_filter))\n\n runner = unittest.TextTestRunner()\n print(runner.run(suite))\n\nmy_suite()\n" }, { "alpha_fraction": 0.5842379927635193, "alphanum_fraction": 0.6069363951683044, "avg_line_length": 26.488372802734375, "blob_id": "0c06730ec074af26020d2e61018d727236273aca", "content_id": "2b8caa9b59832cddebb087cf4862940c1a7865f2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7093, "license_type": "permissive", "max_line_length": 107, "num_lines": 258, "path": "/mortgage_filter/mortgage_base.py", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding: utf-8\n\n# # BASE FUNCTIONS\n# ### Functions for: \n# - minimum down payment\n# - mortgage rate by term\n# - mortgage insurance\n# - monthly payments\n# - optimal monthly payment\n# - total interest\n\n# In[1]:\n\nimport numpy as np\nfrom mortgage_filter.exceptions import *\n\n\n# In[2]:\n\n\ndef min_downpayment(price):\n ''' Returns the minimum downpayment required for a real estate\n price defined by the Financial Consumer Agency of Canada.\n (https://www.canada.ca/en/financial-consumer-agency/services/mortgages/down-payment.html)\n \n Arguments\n ----------\n price : numeric\n Property price or avereage area property price\n \n Return\n ------\n float\n minimum downpayment\n '''\n try:\n if price < 0:\n print(\"Invalid price\")\n return None\n elif price < 500000:\n return price*0.05\n elif price < 1000000:\n return (500000*0.05 + (price - 500000)*0.1)\n return price*0.2\n \n except TypeError:\n print(\"Invalid price input. 
Must be of type numeric\")\n return None\n\n\n# In[33]:\n\n\ndef mort_rate(term):\n ''' If no mortgage rate is specified this function can be used to\n return an estimated mortgage rate based on a regression fit (R^2 = 0.926)\n on average Canadian mortgage rates for possible term lengths.\n (https://www.superbrokers.ca/tools/mortgage-rates-comparison/)\n \n Arguments\n ----------\n term : int\n contract length in years (from 1 to 10 years)\n \n Return\n ------\n float\n interest rate\n '''\n try:\n x = term\n if x < 1:\n raise TermError\n elif x > 10:\n raise TermError('Lengths greater than 10 years are not typically available.')\n elif isinstance(x, float):\n print('Warning: Term lengths are typically in whole years not fractions of years.')\n return round((0.0167*x**2 - 0.0337*x + 1.6851), 3)\n \n except TermError as TE:\n print(f'{TE} \\nTerms must range from 1 to 10 years, but calculation will be performed anyway.')\n return round((0.0167*x**2 - 0.0337*x + 1.6851), 3)\n \n\n\n# In[23]:\n\n\ndef mortgage_insurance(price, downpayment):\n ''' Returns the cost of mortgage insurance.\n \n Insurance rates are calculated from loan to asset price ratio.\n Rates are applied to the loan to generate a lump sum amount that's\n then added to the principal of the loan to give mortgage insurance.\n \n Arguments\n ----------\n price : numeric\n Property price\n \n downpayment : int or float\n Downpayment on property\n \n Return\n ------\n float\n Mortgage insurance\n '''\n try:\n DP_proportion = downpayment / price\n\n # if downpayment more than 20% of the house price no mortgage insurance required.\n if DP_proportion >= 0.2:\n return 0\n elif DP_proportion < 0.05:\n raise PovertyError('Downpayment must be at least 5% the asset value')\n\n loan_to_price = (price-downpayment)/price\n x = loan_to_price\n\n # loan to price ratio determines insurance rate\n insurance_rate = (2924.5*x**4 - 9340.3*x**3 + 11116*x**2 - 5830.8*x + 1137.1)/100\n\n # mortgage insurance is a % applied to the mortgage amount (price - downpayment)\n return round(((price - downpayment) * insurance_rate), 2)\n \n except TypeError:\n print('Bad entry type. 
Received:', type(price), type(downpayment))\n return None\n except PovertyError as PE:\n print(PE, '\\nInput value is too low to be legally considered.')\n return None\n except ZeroDivisionError:\n print('Price cannot be zero.')\n return None\n \n\n\n# In[14]:\n\n\ndef monthly_payment(principal, mortgage_rate, amortization, months = False):\n ''' Returns the monthly payment required to meet the given amortization period.\n Assumes payments occur on a monthly basis.\n\n Arguments\n ----------\n principal : numeric\n \n mortgage_rate : float\n Annual mortgage rate (loan interest)\n \n amortization: int\n Amortization period in years (or in months if months == True)\n \n months : bool \n (Optional) if True, amortization period is interpreted in months (default = False)\n \n Return\n ------\n float\n monthly payment\n '''\n \n R = (mortgage_rate/100/12 + 1) ## monthly interest rate\n \n if months == True:\n n = amortization ## if specified in months, amortization = the number of payments \n else:\n n = amortization*12 ## convert amortization in years to the number of monthly payments\n \n monthly_contribution = principal * ((R**n)*(1-R)/(1-R**n))\n \n return round(monthly_contribution, 2)\n\n\n# In[12]:\n\n\ndef optimal_monthly_payment(principal, mortgage_rate, max_monthly_payment):\n ''' Returns the first amortization period which has a monthly payment\n less than your max_monthly_payment (ie. within budget). The shortest\n possible amortization period has the lowest long term interest cost.\n\n Arguments\n ----------\n principal : numeric\n \n mortgage_rate : float\n Annual mortgage rate (loan interest)\n \n max_monthly_payment: numeric\n Your max affordable monthly contribution\n \n Return\n ------\n list\n mp: monthly payment for a given amortization\n i: amortization period in years\n '''\n try:\n for i in range(1, 26):\n mp = monthly_payment(principal, mortgage_rate, i, months = False)\n if mp <= max_monthly_payment:\n return [mp, i]\n return [np.nan, np.nan]\n \n except TypeError:\n print('Bad entry type. Received:', type(principal), type(mortgage_rate), type(max_monthly_payment))\n return None\n\n\n# In[19]:\n\n\ndef total_interest(principal, mortgage_rate, monthly_payment):\n ''' Returns the cumulative interest paid on a given principal, mortgage rate, and monthly payment.\n \n Arguments\n ----------\n principal : numeric\n \n mortgage_rate : float\n Annual mortgage rate (loan interest)\n \n amortization: int\n Amortization period in years (or in months if months == True)\n \n monthly_payment : bool \n Monthly contribution towards the principal\n \n Return\n ------\n float\n Cumulative interest paid\n '''\n try:\n R = mortgage_rate/1200 ## monthly interest rate\n CumInterest = 0\n\n i = principal * R\n new_p = principal + i - monthly_payment\n\n while new_p > 0:\n CumInterest += i\n i = new_p * R\n new_p = new_p + i - monthly_payment\n\n if new_p >= new_p - i + monthly_payment:\n print(\"Monthly contribution is insufficient to pay off the original Principal.\")\n return None\n\n return round(CumInterest, 2)\n \n except TypeError:\n print('Bad entry type. 
Received:', type(principal), type(mortgage_rate), type(monthly_payment))\n return None\n\n" }, { "alpha_fraction": 0.7027027010917664, "alphanum_fraction": 0.7106518149375916, "avg_line_length": 24.1200008392334, "blob_id": "4f853fc764a0e36cd0e3bb0b44fb2dbdb7a1584b", "content_id": "49778eacf02e9e222b2c9dedc5b8c04980dc400c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 629, "license_type": "permissive", "max_line_length": 86, "num_lines": 25, "path": "/build/lib/mortgage_package/exceptions.py", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[3]:\n\n\nclass mortgageError(Exception):\n '''Mortgage subpackage custom exception class.'''\n pass\n\nclass FilterInputError(mortgageError):\n '''Raised when property filter receives a non dataframe object'''\n pass\n\nclass FormatError(mortgageError):\n '''Raised when a function's input has an improper format'''\n pass\n\nclass TermError(mortgageError):\n '''Raised if term length is is unavailable. Terms must range from (1,10) years.'''\n pass\n\nclass PovertyError(mortgageError):\n '''Raised when downpayment input value is too low to be legally considered'''\n pass\n\n" }, { "alpha_fraction": 0.8222222328186035, "alphanum_fraction": 0.8222222328186035, "avg_line_length": 43.66666793823242, "blob_id": "d74b06a80fb5fc3318c0de341a72ec526b106059", "content_id": "b803fc18f737e5b2b9c365f695233e7a1324d858", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 135, "license_type": "permissive", "max_line_length": 46, "num_lines": 3, "path": "/build/lib/mortgage_package/__init__.py", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "from mortgage_package.mortgage_filter import *\nfrom mortgage_package.mortgage_base import *\nfrom mortgage_package.exceptions import *\n\n" }, { "alpha_fraction": 0.5113300681114197, "alphanum_fraction": 0.5881773233413696, "avg_line_length": 39.078948974609375, "blob_id": "af02fe6ff328e561bd6ea0198d6c41d42c921ccf", "content_id": "8bfb69b2cbab40f55a5b0843339a9b91eddc027a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3045, "license_type": "permissive", "max_line_length": 125, "num_lines": 76, "path": "/tests/test_base.py", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding: utf-8\n\nfrom mortgage_filter.mortgage_base import *\n\nimport unittest\n\nclass Test_base_functions(unittest.TestCase):\n \n @classmethod\n def setUpClass(cls):\n ## Import dependencies\n from numpy import nan, linspace\n print('!!! 
Class setup for Test(Test_base_functions) !!!')\n \n def setUp(self):\n ## Test values\n self.downpayment = [0, 0, 0, 200000, 150000, 50000]\n self.max_monthly_payment = np.linspace(1500, 11000, 4)\n self.principal = 600000\n self.mortgage_rate = 2.6\n self.price = ['4000000', -400000, 0, 400000, 800000, 1200000]\n self.term = [-1, 0.5, 1, 5.5, 10, 12]\n self.amortization_years = [1, 8, 16, 24]\n self.amortization_months = [12, 96, 192, 288]\n print('~~~ start test ~~~')\n \n \n def test_min_downpayment(self):\n self.expected = [None, None, 0, 20000, 55000, 240000]\n for i in range(6):\n self.assertEqual(min_downpayment(self.price[i]), self.expected[i]) \n \n def test_mort_rate(self):\n self.expected = [1.736, 1.672, 1.668, 2.005, 3.018, 3.686]\n for i in range(6):\n self.assertEqual(mort_rate(self.term[i]), self.expected[i])\n \n def test_mortgage_insurance(self):\n self.expected = [None, None, None, 0, 16026.85, None]\n for i in range(6):\n self.assertEqual(mortgage_insurance(self.price[i], self.downpayment[i]), self.expected[i])\n \n def test_monthly_payment(self):\n self.expected = [50706.96, 6929.26, 3823.27, 2802.68]\n for i in range(4):\n self.assertEqual(monthly_payment(self.principal, self.mortgage_rate, self.amortization_years[i], months = False),\n self.expected[i])\n self.assertEqual(monthly_payment(self.principal, self.mortgage_rate, self.amortization_months[i], months = True),\n self.expected[i])\n \n def test_optimal_monthly_payment(self):\n self.expected = [[np.nan, np.nan], [4536.83, 13], [7820.28, 7], [10674.89, 5]]\n for i in range(4):\n self.assertEqual(optimal_monthly_payment(self.principal, self.mortgage_rate, self.max_monthly_payment[i]),\n self.expected[i])\n\n def test_total_interest(self):\n self.expected = [796440.43, 104031.41, 56784.2, 39211.53]\n for i in range(4):\n self.assertEqual(total_interest(self.principal, self.mortgage_rate, self.max_monthly_payment[i]),\n self.expected[i])\n \n ## handling test for insufficient monthly income leading to infinitely large interest\n self.assertIsNone(total_interest(self.principal, self.mortgage_rate, 100), None)\n \n \n def tearDown(self):\n print('~~~ complete ~~~') \n \n @classmethod\n def tearDownClass(cls):\n print('!!! 
Class teardown for Test(Test_base_functions) !!!\\n\\n\\n')\n\n\n#unittest.main(argv=[''], verbosity=2, exit=False)" }, { "alpha_fraction": 0.664766252040863, "alphanum_fraction": 0.6716077327728271, "avg_line_length": 30.35714340209961, "blob_id": "1c309c904494353b1c50fde0a2080664dfc2834d", "content_id": "2d8fffd47da33b7efe7f0e8913b357c3d44fde39", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 877, "license_type": "permissive", "max_line_length": 102, "num_lines": 28, "path": "/setup.py", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "import pathlib\nimport setuptools\n\n# The directory containing this file\nHERE = pathlib.Path(__file__).parent\n\n# The text of the README file\nREADME = (HERE / \"README.md\").read_text()\n\nsetuptools.setup(\n name='mortgage-filter-lukavuko',\n version='0.2.4',\n packages=setuptools.find_packages(exclude=['tests*']),\n license='MIT',\n description='A package for filtering real estate opportunities based on your financial situation',\n long_description=README,\n long_description_content_type = 'text/markdown',\n url='https://github.com/lukavuko/mortgage-filter-package',\n author='Luka Vukovic',\n author_email='[email protected]',\n classifiers=[\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n ],\n include_package_data=True,\n install_requires=[\"pandas\", \"numpy\"]\n)" }, { "alpha_fraction": 0.6510713696479797, "alphanum_fraction": 0.6544132232666016, "avg_line_length": 38.42635726928711, "blob_id": "768ec853a6a9ca56727d23194c02c98b14a92799", "content_id": "bb9cfe8104cc883ccda800a7f1bc082898b4d8e8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5087, "license_type": "permissive", "max_line_length": 264, "num_lines": 129, "path": "/build/lib/mortgage_package/mortgage_filter.py", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding: utf-8\n\n# # PRIMARY FILTER FUNCTION\n# ### Uses base functions to compute affordability metrics.\n\n# In[1]:\n\n\nimport warnings, pandas as pd, numpy as np\nfrom mortgage_package.mortgage_base import *\nfrom mortgage_package.exceptions import *\n\n\n# In[4]:\n\n\ndef property_filter(property_data, downpayment, mortgage_rate = None, mortgage_term = None, max_monthly_payment = None, max_loan = None):\n ''' Given a dataframe of properties, their prices, and some basic financial information, it returns a dataframe with only the affordable properties and other affordability metrics (ie. 
how long it would take to pay off, monthly payments, total interest, etc.).\n \n Arguments\n ----------\n data : dataframe \n Areas/properties in column index 0 (str)\n Respective prices in column index 1 (numeric) \n \n downpayment : numeric\n Your maximal possible downpayment\n \n mortgage_rate : numeric \n Interest rate on the mortgage loan (leave empty if mortgage_term is provided)\n \n mortgage_term : int \n Contract length in years (1 to 10) for the mortgage interest rate.\n Only specify if you do not know what mortgage_rate to enter (leave empty if mortgage_rate provided)\n \n max_monthly_payment : numeric \n Your max affordable or bank limited monthly payment towards your home\n \n max_loan : numeric\n Max eligible loan based on your downpayment\n\n Return\n ------\n dataframe\n Properties/Areas\n Prices/Average area price\n Minimum_Downpayment\n Mortgage_Insurance\n Principal\n Monthly_Payment\n Shortest_Amortization\n Total_Interest\n Net_Cost (assuming no other fees)\n '''\n \n warnings.filterwarnings(\"ignore\") \n \n try:\n # is object a dataframe?\n if isinstance(property_data, pd.DataFrame) == False:\n raise FilterInputError('Dataframe object expected')\n # right number of columns?\n if len(property_data.columns)!=2:\n raise FormatError('Expected two columns of type str and numeric, respectively')\n # is column at index 1 (price) numeric?\n if pd.api.types.is_numeric_dtype(property_data.iloc[:,1]) == False:\n raise TypeError('Column at index 1 (price) must be numeric')\n \n \n except FilterInputError as FIE:\n print(FIE, '\\nReveived object of type:', type(property_data))\n return None\n except FormatError as FE:\n print(FE,'\\nReveived dataframe with this many columns:', len(property_data.columns))\n return None \n except TypeError as TE:\n print(TE)\n return None\n \n data = property_data.copy()\n \n # Rename columns\n data.set_axis(['Property/Area', 'Price'], axis=1, inplace=True)\n \n # Note original input of properties\n og_prop_count = data['Property/Area'].count()\n \n # FILTER: Downpayment. Remove properties where minimal DP exceeds your entered DP\n data['Minimum_Downpayment'] = data.iloc[:, 1].apply(lambda x: min_downpayment(x))\n data = data[data['Minimum_Downpayment'] <= downpayment]\n \n # Mortgage rate. If none provided give a reasonable estimate\n if mortgage_rate == None:\n mortgage_rate = mort_rate(mortgage_term)\n \n # Calculate mortgage insurance (default insurance) lump sum for each property\n data['Mortgage_Insurance'] = data.loc[:, 'Price'].apply(lambda p: mortgage_insurance(p, downpayment))\n \n # Calculate initial principal for each property\n data['Principal'] = round((data['Price'] - downpayment + data['Mortgage_Insurance']), 2)\n \n # FILTER: Max eligible loan. 
Remove properties where the principal exceeds the max approved loan\n # If no max loan specified assume no limit\n if max_loan != None:\n data = data[data['Principal'] < max_loan]\n \n # Add two columns for monthly payment and shortest amortization period\n # These are outputs of the optimal_monthly_payment function\n temp = data.loc[:, 'Principal'].apply(lambda principal: optimal_monthly_payment(principal, mortgage_rate, max_monthly_payment))\n temp = list(zip(*temp))\n \n data['Monthly_Payment'] = temp[0]\n data['Shortest_Amortization'] = temp[1]\n\n # FILTER: Remove rows where Monthly_Payment is NaN\n data = data[data['Monthly_Payment'].notnull()]\n \n # Add column for the cumulative cost of interest given that amortization\n tot_int = []\n for princ, monthly_payment in data[['Principal', 'Monthly_Payment']].itertuples(index=False):\n tot_int += [total_interest(princ, mortgage_rate, monthly_payment)]\n data['Total_Interest'] = tot_int\n \n # Add column for net cost of home (price + cumulative interest + mortgage insurance)\n data['Net_Cost'] = data['Price'] + data['Mortgage_Insurance'] + data['Total_Interest']\n \n print(f\"You can afford {data['Property/Area'].count()} properties from the {og_prop_count} you've provided.\")\n return data\n\n" }, { "alpha_fraction": 0.53990238904953, "alphanum_fraction": 0.6002788543701172, "avg_line_length": 26.06289291381836, "blob_id": "3d4936c5385ff665bae1c95ca46a8ce2b520293e", "content_id": "9d58a394591fc59276b3ea81065fe2d1606440d6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 21515, "license_type": "permissive", "max_line_length": 346, "num_lines": 795, "path": "/README.md", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "![CI Tests](https://github.com/lukavuko/mortgage-filter-package/workflows/CI%20Tests/badge.svg)\n\n# The Mortgage-Filter-Package\nA Python package for processing real estate data frames that instantly yields an array of affordability information. Let's find our dream homes, enjoy and thanks for stopping by! :confetti_ball::balloon::confetti_ball::balloon:\n\n### Links\n* [to Pypi](https://pypi.org/project/mortgage-filter-lukavuko/)\n* [to Source](https://github.com/lukavuko/mortgage-filter-package)\n* [to Demo](#How-to-use-the-Mortgage-Filter)\n* [to Demo *notebook*](https://github.com/lukavuko/mortgage-filter-package/blob/main/demo/Demo.md)\n* [to Final Notes](#final-notes)\n* [to Documentation](#how-to-access-documentation)\n\n### Requirements\n- python >=3.7\n- pandas\n- numpy\n\n### Installation\n`$ pip install mortgage-filter-lukavuko`\n\n### Motivation\nAt some point in our live, we may consider buying a home. To do so, we'll sift through massive amounts of research and properties followed by calculating what's affordable and what isn't. How tedious. But wait... with the mortgage-filter-package, one can seamlessly filter through real estate opportunities based on one's financial circumstances.\n\nThe package is designed to filter property dataframes to yield the affordable ones and information regarding their affordability (monthly payments to minimize cumulative interest, years to pay off, mortgage insurance, etc.). \n\nThis was otherwise a small side project I wanted to do to better understand the home buying process and all the associated costs. 
I'm quite happy with the tool and hopefully more functionalies will be added in time!\n\n# How to use the Mortgage Filter\n\n***\n\nLets start by importing the package and any associated packages.\n\n```python\nfrom mortgage_filter import *\n\nimport pandas as pd, numpy as np\nimport matplotlib.pyplot as plt\n\n%matplotlib inline\nplt.style.use('default')\n```\n\n### Demo Data: Average housing prices by area in and around Vancouver, British Columbia\n- Data Structure:\n - The mortgage filter is currently designed to work on dataframes with two columns.\n - One column for the property/area\n - A second column for the price\n\n```python\nproperties = pd.read_csv('data/vancouver_area_testing_set.csv', usecols = [0,1])\nproperties.head()\n```\n\n<div>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Area</th>\n <th>House Price</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>0</th>\n <td>Downtown &amp; Westside</td>\n <td>3118200</td>\n </tr>\n <tr>\n <th>1</th>\n <td>West Vancouver</td>\n <td>2743600</td>\n </tr>\n <tr>\n <th>2</th>\n <td>North Vancouver</td>\n <td>1665100</td>\n </tr>\n <tr>\n <th>3</th>\n <td>Richmond</td>\n <td>1581600</td>\n </tr>\n <tr>\n <th>4</th>\n <td>South Burnaby</td>\n <td>1564000</td>\n </tr>\n </tbody>\n</table>\n</div>\n\n\n### What properties can we afford?\n- Lets assume the following parameters:\n - downpayment of **\\$190,000**\n - monthly payments of **\\$4,800**|\n - mortgage rate?\n\nSince we may not know have a reasonable guess, let's specify a term length.\n\n*The interest rate typically depends on how long the term lasts with the bank.*\n\nLet's try a term of **15 years.**\n\n```python\nproperty_filter(property_data = properties,\n downpayment = 190000,\n mortgage_term = 15,\n max_monthly_payment = 4800)\n```\n\n Lengths greater than 10 years are not typically available. 
\n Terms must range from 1 to 10 years, but calculation will be performed anyway.\n You can afford 5 properties from the 25 you've provided.\n \n<div>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Property/Area</th>\n <th>Price</th>\n <th>Minimum_Downpayment</th>\n <th>Mortgage_Insurance</th>\n <th>Principal</th>\n <th>Monthly_Payment</th>\n <th>Shortest_Amortization</th>\n <th>Total_Interest</th>\n <th>Net_Cost</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>20</th>\n <td>Pitt Meadows</td>\n <td>974800</td>\n <td>72480.0</td>\n <td>18793.83</td>\n <td>803593.83</td>\n <td>4767.48</td>\n <td>24</td>\n <td>569440.84</td>\n <td>1563034.67</td>\n </tr>\n <tr>\n <th>21</th>\n <td>North Delta</td>\n <td>972500</td>\n <td>72250.0</td>\n <td>18702.26</td>\n <td>801202.26</td>\n <td>4753.29</td>\n <td>24</td>\n <td>567746.53</td>\n <td>1558948.79</td>\n </tr>\n <tr>\n <th>22</th>\n <td>Maple Ridge</td>\n <td>884200</td>\n <td>63420.0</td>\n <td>0.00</td>\n <td>694200.00</td>\n <td>4698.63</td>\n <td>19</td>\n <td>377088.77</td>\n <td>1261288.77</td>\n </tr>\n <tr>\n <th>23</th>\n <td>Abbotsford</td>\n <td>873600</td>\n <td>62360.0</td>\n <td>0.00</td>\n <td>683600.00</td>\n <td>4782.72</td>\n <td>18</td>\n <td>349446.77</td>\n <td>1223046.77</td>\n </tr>\n <tr>\n <th>24</th>\n <td>Mission</td>\n <td>726000</td>\n <td>47600.0</td>\n <td>0.00</td>\n <td>536000.00</td>\n <td>4662.46</td>\n <td>13</td>\n <td>191344.02</td>\n <td>917344.02</td>\n </tr>\n </tbody>\n</table>\n</div>\n\n### We're left with what?\n\n- We're left with all the affordable home indexes as well as:\n - the listed prices\n - the minimum downpayment (5% of value)\n - the mortgage insurance\n - the principal (price - downpayment) \n - the optimal monthly payment for the shortest amortization period (years)\n - the cumulative interst\n - the net cost\n- We note there's a printed warning saying that terms must be less than or equal to 10 years (banks don't typically offer terms beyond 10 years).\n\n The function can handles the exception and **extrapolates from the term to interest rate function; however, this could yield misleading interest rates.**\n \n Lets see what the function looks like (interest as a function of term length):\n\n```python\nterms = np.arange(1, 15)\nrate = [mort_rate(t) for t in terms]\n \nplt.plot(terms, rate)\nplt.xlabel('Terms'); plt.ylabel('Interest Rate as %')\nplt.axvline(x = 10, linestyle='--', color = 'black')\nplt.grid()\n\nplt.savefig('output_7_1.svg', transparent = False, format = 'svg')\nplt.show()\n```\n\n Lengths greater than 10 years are not typically available. \n Terms must range from 1 to 10 years, but calculation will be performed anyway.\n Lengths greater than 10 years are not typically available. \n Terms must range from 1 to 10 years, but calculation will be performed anyway.\n Lengths greater than 10 years are not typically available. \n Terms must range from 1 to 10 years, but calculation will be performed anyway.\n Lengths greater than 10 years are not typically available. 
\n Terms must range from 1 to 10 years, but calculation will be performed anyway.\n \n![png](demo/output_7_1.svg)\n\n### Okay, now lets try using a mortgage rate of our own, say 2.8%.\n\n```python\nproperty_filter(property_data = properties,\n downpayment = 190000,\n mortgage_rate = 2.8,\n max_monthly_payment = 4800)\n```\n\n You can afford 5 properties from the 25 you've provided.\n \n\n<div>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Property/Area</th>\n <th>Price</th>\n <th>Minimum_Downpayment</th>\n <th>Mortgage_Insurance</th>\n <th>Principal</th>\n <th>Monthly_Payment</th>\n <th>Shortest_Amortization</th>\n <th>Total_Interest</th>\n <th>Net_Cost</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>20</th>\n <td>Pitt Meadows</td>\n <td>974800</td>\n <td>72480.0</td>\n <td>18793.83</td>\n <td>803593.83</td>\n <td>4740.54</td>\n <td>18</td>\n <td>220350.90</td>\n <td>1213944.73</td>\n </tr>\n <tr>\n <th>21</th>\n <td>North Delta</td>\n <td>972500</td>\n <td>72250.0</td>\n <td>18702.26</td>\n <td>801202.26</td>\n <td>4726.43</td>\n <td>18</td>\n <td>219695.22</td>\n <td>1210897.48</td>\n </tr>\n <tr>\n <th>22</th>\n <td>Maple Ridge</td>\n <td>884200</td>\n <td>63420.0</td>\n <td>0.00</td>\n <td>694200.00</td>\n <td>4727.53</td>\n <td>15</td>\n <td>156743.49</td>\n <td>1040943.49</td>\n </tr>\n <tr>\n <th>23</th>\n <td>Abbotsford</td>\n <td>873600</td>\n <td>62360.0</td>\n <td>0.00</td>\n <td>683600.00</td>\n <td>4655.34</td>\n <td>15</td>\n <td>154350.26</td>\n <td>1027950.26</td>\n </tr>\n <tr>\n <th>24</th>\n <td>Mission</td>\n <td>726000</td>\n <td>47600.0</td>\n <td>0.00</td>\n <td>536000.00</td>\n <td>4722.69</td>\n <td>11</td>\n <td>87383.70</td>\n <td>813383.70</td>\n </tr>\n </tbody>\n</table>\n</div>\n\n### Notice how the last 4 columns have lower costs now.\n\n- Lets try one more time with a high downpayment/low monthly payment scenario.\n- Lets also assume a loan limit of **\\$600,000** and **\\$700,000** to see how this might affect a buying decision.\n\n```python\nproperty_filter(property_data = properties,\n downpayment = 500000,\n mortgage_rate = 2.8,\n max_monthly_payment = 3200,\n max_loan = 600000).head()\n```\n\n You can afford 12 properties from the 25 you've provided.\n \n<div>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Property/Area</th>\n <th>Price</th>\n <th>Minimum_Downpayment</th>\n <th>Mortgage_Insurance</th>\n <th>Principal</th>\n <th>Monthly_Payment</th>\n <th>Shortest_Amortization</th>\n <th>Total_Interest</th>\n <th>Net_Cost</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>13</th>\n <td>Langley</td>\n <td>1090800</td>\n <td>218160.0</td>\n <td>0</td>\n <td>590800</td>\n <td>3103.53</td>\n <td>21</td>\n <td>191282.32</td>\n <td>1282082.32</td>\n </tr>\n <tr>\n <th>14</th>\n <td>Cloverdale</td>\n <td>1087400</td>\n <td>217480.0</td>\n <td>0</td>\n <td>587400</td>\n <td>3199.21</td>\n <td>20</td>\n <td>180410.55</td>\n <td>1267810.55</td>\n </tr>\n <tr>\n <th>15</th>\n <td>Central Surrey</td>\n <td>1086300</td>\n <td>217260.0</td>\n <td>0</td>\n <td>586300</td>\n <td>3193.22</td>\n <td>20</td>\n <td>180065.18</td>\n <td>1266365.18</td>\n </tr>\n <tr>\n <th>16</th>\n <td>Ladner</td>\n <td>1042000</td>\n <td>208400.0</td>\n <td>0</td>\n <td>542000</td>\n <td>3197.35</td>\n <td>18</td>\n <td>148627.69</td>\n <td>1190627.69</td>\n </tr>\n <tr>\n <th>17</th>\n <td>Port Coquitlam</td>\n <td>1034400</td>\n <td>206880.0</td>\n <td>0</td>\n 
<td>534400</td>\n <td>3152.52</td>\n <td>18</td>\n <td>146536.03</td>\n <td>1180936.03</td>\n </tr>\n </tbody>\n</table>\n</div>\n\n```python\nproperty_filter(property_data = properties,\n downpayment = 500000,\n mortgage_rate = 2.8,\n max_monthly_payment = 3200,\n max_loan = 700000).head()\n```\n\n You can afford 14 properties from the 25 you've provided.\n \n\n<div>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Property/Area</th>\n <th>Price</th>\n <th>Minimum_Downpayment</th>\n <th>Mortgage_Insurance</th>\n <th>Principal</th>\n <th>Monthly_Payment</th>\n <th>Shortest_Amortization</th>\n <th>Total_Interest</th>\n <th>Net_Cost</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>11</th>\n <td>Tsawwassen</td>\n <td>1153300</td>\n <td>230660.0</td>\n <td>0</td>\n <td>653300</td>\n <td>3117.86</td>\n <td>24</td>\n <td>244644.99</td>\n <td>1397944.99</td>\n </tr>\n <tr>\n <th>12</th>\n <td>New Westminster</td>\n <td>1127200</td>\n <td>225440.0</td>\n <td>0</td>\n <td>627200</td>\n <td>3184.83</td>\n <td>22</td>\n <td>213587.50</td>\n <td>1340787.50</td>\n </tr>\n <tr>\n <th>13</th>\n <td>Langley</td>\n <td>1090800</td>\n <td>218160.0</td>\n <td>0</td>\n <td>590800</td>\n <td>3103.53</td>\n <td>21</td>\n <td>191282.32</td>\n <td>1282082.32</td>\n </tr>\n <tr>\n <th>14</th>\n <td>Cloverdale</td>\n <td>1087400</td>\n <td>217480.0</td>\n <td>0</td>\n <td>587400</td>\n <td>3199.21</td>\n <td>20</td>\n <td>180410.55</td>\n <td>1267810.55</td>\n </tr>\n <tr>\n <th>15</th>\n <td>Central Surrey</td>\n <td>1086300</td>\n <td>217260.0</td>\n <td>0</td>\n <td>586300</td>\n <td>3193.22</td>\n <td>20</td>\n <td>180065.18</td>\n <td>1266365.18</td>\n </tr>\n </tbody>\n</table>\n</div>\n\n### Note that no mortgage insurance is applied.\n\n***In Canada, downpayments >20% of the home price do not require mortgage insurance.***\n\n***\n\n### If you'd like to dive deeper at one scenario in particular, base functions could be used as follows:\n\n1. How quickly does cumulative interest increase as the mortgage rate increases?\n2. How does cumulative interest change as monthly contributions increase??\n3. How does the amortization period change as as monthly contributions increase?\n4. What's my monthly payment on a **\\$500,000** home for a 20 year amortization on 2% interest?\n5. 
How does mortgage insurance change with downpayment?\n\n```python\n# Parameters\nprinc = 500000 # principal of 500k\nmth_pay = 2000 # monthly payment of 2k\nmort_rate = 2.0 # mortgage rate of 2%\n```\n\n\n```python\n# Question 1\n\nrate = np.linspace(1, 5, 20)\ninterest = [total_interest(princ, mortgage_rate = i, monthly_payment = mth_pay) for i in rate]\n\nplt.plot(rate, interest)\nplt.ticklabel_format(axis='y', style='plain')\nplt.xlabel('Interest Rate as %'); plt.ylabel('Cumulative Interest'); plt.grid()\n\nplt.savefig('output_15_1.svg', transparent = False, format = 'svg')\nplt.show()\n\n```\n\n Monthly contribution is insufficient to pay off the original Principal.\n \n\n![png](demo/output_15_1.svg)\n\n```python\n# Question 2\n\npayments = np.linspace(800, 6000, 20)\ninterest = [total_interest(princ, mort_rate, monthly_payment = p) for p in payments]\n\nplt.plot(payments, interest)\nplt.ticklabel_format(axis='y', style='plain')\nplt.xlabel('Monthly Payment Amount'); plt.ylabel('Cumulative Interest'); plt.grid()\n\nplt.savefig('output_16_1.svg', transparent = False, format = 'svg')\nplt.show()\n```\n\n Monthly contribution is insufficient to pay off the original Principal.\n \n\n![png](demo/output_16_1.svg)\n \n- Note that as the monthly payment increases, not only does cumulative interest decrease, but the amortization period also decreases substantially, though this isn't seen in the plot above.\n- Let's just peek at how the amortization period decreases.\n\n```python\n# Question 3\n\nyears = [optimal_monthly_payment(princ, mort_rate, max_monthly_payment = p)[1] for p in payments]\n\nplt.plot(payments, years)\nplt.xlabel('Max Monthly Payment'); plt.ylabel('Years to Pay Off'); plt.grid()\n\nplt.savefig('output_18_1.svg', transparent = False, format = 'svg')\nplt.show()\n```\n \n![png](demo/output_18_1.svg)\n\n```python\n# Question 4\n\nmonthly_payment(princ, mort_rate, amortization = 20)\n```\n\n\n\n\n 2529.42\n\n\n\n\n```python\n# Question 5\ndownpayment_size = np.linspace(22000, 125000, 28)\nmort_ins = [mortgage_insurance(princ, d) for d in downpayment_size]\n\nplt.plot(downpayment_size, mort_ins)\nplt.xlabel('Downpayment on $500,000'); plt.ylabel('Mortgage Insurance'); plt.grid()\n\nplt.savefig('output_20_1.svg', transparent = False, format = 'svg')\nplt.show()\n```\n\n Downpayment must be at least 5% the asset value \n Input value is too low to be legally considered.\n \n\n![png](demo/output_20_1.svg)\n \n - Notice how a message prints for downpayments less than 5% of the property value (the minimum downpayment).\n - Also notice how at a downpayment of 20% mortgage insurance no longer applies.\n \n***\n\n# Final Notes\n#### **I plan to continue adding features as time goes on, but for now I just wanted to understand and work with the fundamentals.**\n#### **If you have more questions or requests, please reach out to me at my email, [email protected]**\n\n### To Do\n- Use Sphinx for documentation building\n- Add an ML component for predicting true property valuation to compare with current market value\n- Add relevant visualization wrappers (ie. 
property overlay on maps)\n- Add an API for pulling listed property information from the web\n- More exception handling\n\n### Done\n- Add a demo\n- Configure continuous integration testing --> Github action .yml configured\n- Test suite provides >95% coverage\n- Passing build stamp\n- Publish package (i.e., upload the package to PyPi) and add the link to the README file.\n\n### How to access documentation\n\n***A formal documentation is in the works but for now, a call on help will display the function docs:***\n\n\n```python\nhelp(mortgage_filter)\n```\n\n Help on module mortgage_filter.mortgage_filter in mortgage_filter:\n \n NAME\n mortgage_filter.mortgage_filter - # coding: utf-8\n \n FUNCTIONS\n property_filter(property_data, downpayment, mortgage_rate=None, mortgage_term=None, max_monthly_payment=None, max_loan=None)\n Given a dataframe of properties, their prices, and some basic financial information, it returns a dataframe with only the affordable properties and other affordability metrics (ie. how long it would take to pay off, monthly payments, total interest, etc.).\n \n Arguments\n ----------\n data : dataframe \n Areas/properties in column index 0 (str)\n Respective prices in column index 1 (numeric) \n \n downpayment : numeric\n Your maximal possible downpayment\n \n mortgage_rate : numeric \n Interest rate on the mortgage loan (leave empty if mortgage_term is provided)\n \n mortgage_term : int \n Contract length in years (1 to 10) for the mortgage interest rate.\n Only specify if you do not know what mortgage_rate to enter (leave empty if mortgage_rate provided)\n \n max_monthly_payment : numeric \n Your max affordable or bank limited monthly payment towards your home\n \n max_loan : numeric\n Max eligible loan based on your downpayment\n \n Return\n ------\n dataframe\n Properties/Areas\n Prices/Average area price\n Minimum_Downpayment\n Mortgage_Insurance\n Principal\n Monthly_Payment\n Shortest_Amortization\n Total_Interest\n Net_Cost (assuming no other fees)\n \n \n \n\n\n```python\nhelp(total_interest)\n```\n\n Help on function total_interest in module mortgage_filter.mortgage_base:\n \n total_interest(principal, mortgage_rate, monthly_payment)\n Returns the cumulative interest paid on a given principal, mortgage rate, and monthly payment.\n \n Arguments\n ----------\n principal : numeric\n \n mortgage_rate : float\n Annual mortgage rate (loan interest)\n \n amortization: int\n Amortization period in years (or in months if months == True)\n \n monthly_payment : bool \n Monthly contribution towards the principal\n \n Return\n ------\n float\n Cumulative interest paid\n \n \n\n\n```python\nhelp(optimal_monthly_payment)\n```\n\n Help on function optimal_monthly_payment in module mortgage_filter.mortgage_base:\n \n optimal_monthly_payment(principal, mortgage_rate, max_monthly_payment)\n Returns the first amortization period which has a monthly payment\n less than your max_monthly_payment (ie. within budget). 
The shortest\n possible amortization period has the lowest long term interest cost.\n \n Arguments\n ----------\n principal : numeric\n \n mortgage_rate : float\n Annual mortgage rate (loan interest)\n \n max_monthly_payment: numeric\n Your max affordable monthly contribution\n \n Return\n ------\n list\n mp: monthly payment for a given amortization\n i: amortization period in years\n \n \n\n\n```python\nhelp(mortgage_insurance)\n```\n\n Help on function mortgage_insurance in module mortgage_filter.mortgage_base:\n \n mortgage_insurance(price, downpayment)\n Returns the cost of mortgage insurance.\n \n Insurance rates are calculated from loan to asset price ratio.\n Rates are applied to the loan to generate a lump sum amount that's\n then added to the principal of the loan to give mortgage insurance.\n \n Arguments\n ----------\n price : numeric\n Property price\n \n downpayment : int or float\n Downpayment on property\n \n Return\n ------\n float\n Mortgage insurance\n" }, { "alpha_fraction": 0.8181818127632141, "alphanum_fraction": 0.8181818127632141, "avg_line_length": 42.66666793823242, "blob_id": "ea56f894ab0bd81fefd22ea87f83d911aa4c212c", "content_id": "c1bc1cd3407ed26127c070ac914737e0849d27ad", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 132, "license_type": "permissive", "max_line_length": 45, "num_lines": 3, "path": "/build/lib/mortgage_filter/__init__.py", "repo_name": "lukavuko/mortgage-filter-package", "src_encoding": "UTF-8", "text": "from mortgage_filter.mortgage_filter import *\nfrom mortgage_filter.mortgage_base import *\nfrom mortgage_filter.exceptions import *\n\n" } ]
10
stefaneng/catalogbot
https://github.com/stefaneng/catalogbot
d3abdbc945c0ca1e16838ba70b535989b1201806
d85903ecdf42e6607fd2de89cd4a217293114f8a
2ef1fa85725d8ef753fb88e16c686f401c18a002
refs/heads/master
2016-08-04T15:45:33.364805
2014-06-07T19:41:02
2014-06-07T19:41:02
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7425742745399475, "alphanum_fraction": 0.7821782231330872, "avg_line_length": 32.66666793823242, "blob_id": "abedd985b67929a1401a6c3145d0b21c1f55c7c5", "content_id": "7ff7938b70e2f375413975fb807a62be910a804e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 101, "license_type": "no_license", "max_line_length": 80, "num_lines": 3, "path": "/requirements.txt", "repo_name": "stefaneng/catalogbot", "src_encoding": "UTF-8", "text": "scrapy==0.22.2\nnose\n-e git+https://github.com/sprij/scrapy-rethinkdb.git#egg=scrapy_rethinkdb-master\n" }, { "alpha_fraction": 0.699914276599884, "alphanum_fraction": 0.727922260761261, "avg_line_length": 46.283782958984375, "blob_id": "268f2467b51b5e893fe2d6f0a68fec15ad845cd9", "content_id": "11bacdea4d0cf2ee4a735f7b0ad42a865be4ca06", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3499, "license_type": "no_license", "max_line_length": 365, "num_lines": 74, "path": "/tests/test_parsetools.py", "repo_name": "stefaneng/catalogbot", "src_encoding": "UTF-8", "text": "from catalogbot.parsetools import courseparser\nfrom catalogbot.items import CourseItem\n\ndef test_parse_remove_prereqs():\n \"\"\"Removed the prereq string from body: 'Prerequisite: COMP 490/L. Project-oriented lab to allow students to complete the design, implementation and testing of the team-based software engineering project started in COMP 490/L. Lab: 3 hours per week.'\"\"\"\n\n body = 'Prerequisite: COMP 490/L. Project-oriented lab to allow students to complete the design, implementation and testing of the team-based software engineering project started in COMP 490/L. Lab: 3 hours per week.'\n\n courseitem = CourseItem()\n target_string = 'Project-oriented lab to allow students to complete the design, implementation and testing of the team-based software engineering project started in COMP 490/L. Lab: 3 hours per week.'\n courseitem['description'] = target_string\n out_item = courseparser.parse_body(CourseItem(), body)\n\n print courseitem, out_item\n\n assert courseitem == out_item\n\ndef test_parse_body_to_prereqs():\n \"\"\"Tests 'Prerequisites: COMP 182/L; MATH 150A; PHIL 230. Study of discrete mathematical structures and proof techniques as used in computer science. Discrete structures, such as functions, relations, sets, graphs and trees. Proof techniques, such as proof by induction, proof by contradiction and proof by cases. Counting techniques. Lab: 3 hours per week.'\"\"\"\n\n body = 'Prerequisites: COMP 182/L; MATH 150A; PHIL 230. Study of discrete mathematical structures and proof techniques as used in computer science. Discrete structures, such as functions, relations, sets, graphs and trees. Proof techniques, such as proof by induction, proof by contradiction and proof by cases. Counting techniques. 
Lab: 3 hours per week.'\n target_string = 'Prerequisites: COMP 182/L; MATH 150A; PHIL 230.'\n out_string = courseparser.parse_prereqs_body(body)\n\n print target_string, out_string\n assert target_string == out_string\n\ndef test_parse_prereqs():\n \"\"\"Test parsing of prerequisites 'Prerequisites: COMP 256/L, 333.'\"\"\"\n\n courseitem = CourseItem()\n courseitem['prereqs'] = ['COMP 256/L', 'COMP 333']\n em_tag = 'Prerequisites: COMP 256/L, 333.'\n out_courseitem = courseparser.parse_em(CourseItem(), 'prereqs', em_tag)\n\n print courseitem, out_courseitem\n\n assert courseitem == out_courseitem\n\ndef test_parse_prereq_semicolons():\n\n test_string = 'Prerequisites: COMP 182/L; MATH 150A; PHIL 230.'\n courseitem = CourseItem()\n courseitem['prereqs'] = ['COMP 182/L', 'MATH 150A', 'PHIL 230']\n out_courseitem = courseparser.parse_em(CourseItem(), 'prereqs', test_string)\n\n print courseitem, out_courseitem\n\n assert courseitem == out_courseitem\n\ndef test_parse_title_empty():\n \"\"\"Test parsing an empty title\"\"\"\n\n courseitem = CourseItem()\n coursetitle = ''\n out_courseitem = courseparser.parse_title(CourseItem(), coursetitle)\n\n assert courseitem == out_courseitem\n\n\ndef test_parse_title():\n \"\"\"Parse a class title for 'COMP 310. Automata, Languages and Computation (3)'\"\"\"\n\n courseItem = CourseItem()\n courseItem['classname'] = 'COMP 310'\n courseItem['department'] = 'COMP'\n courseItem['number'] = '310'\n courseItem['longname'] = 'Automata, Languages and Computation'\n courseItem['units'] = '3'\n\n coursetitle = 'COMP 310. Automata, Languages and Computation (3)'\n out_courseItem = courseparser.parse_title(CourseItem(), coursetitle)\n\n assert courseItem == out_courseItem\n" }, { "alpha_fraction": 0.5786630511283875, "alphanum_fraction": 0.5827143788337708, "avg_line_length": 31.91111183166504, "blob_id": "07a6af46b73b019442402814129457f77aae2364", "content_id": "e7c8d5e648572f61233e2906dbde870dc449e89f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1481, "license_type": "no_license", "max_line_length": 86, "num_lines": 45, "path": "/catalogbot/spiders/catalog_spider.py", "repo_name": "stefaneng/catalogbot", "src_encoding": "UTF-8", "text": "from scrapy.spider import Spider\nfrom scrapy.http import Request\nfrom scrapy.selector import Selector\nfrom catalogbot.items import CourseItem\nfrom catalogbot.parsetools.courseparser import *\n\nclass CatalogSpider(Spider):\n name = \"catalog\"\n allowed_domains = [\"catalog.csun.edu\"]\n start_urls = [\n \"http://catalog.csun.edu/\"\n ]\n\n def parse_course(self, response):\n sel = Selector(response)\n\n for course_title_sel in sel.xpath('//div[@id=\"courses\"]/h4'):\n course = CourseItem()\n\n # Get the title line of course from the <h4>'s\n title = course_title_sel.xpath('text()').extract()[0]\n\n # Get the course body\n ptag = course_title_sel.xpath('following-sibling::p')\n em = ptag.xpath('em/text()').extract()\n pbody = course_title_sel.xpath('following-sibling::p/text()').extract()[0]\n\n if len(em) > 0:\n body = em[0] + pbody\n else:\n body = pbody\n\n course = parse_title(course, title)\n course = parse_prereqs(course, body)\n course = parse_coreqs(course, body)\n course = parse_prep(course, body)\n course = parse_body(course, pbody)\n\n yield course\n\n def parse(self, response):\n sel = Selector(response)\n\n for url in sel.xpath('//div[@class=\"cols\"]/ul/li/a/@href').extract():\n yield Request(url + 'courses/', callback=self.parse_course)\n" }, { 
"alpha_fraction": 0.6584362387657166, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 26, "blob_id": "f37d304c1d6cb56fd2568a0cc787d513d89b3743", "content_id": "88c461a7d29815afb855772248c6488f9b6fe25c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 243, "license_type": "no_license", "max_line_length": 64, "num_lines": 9, "path": "/setup.py", "repo_name": "stefaneng/catalogbot", "src_encoding": "UTF-8", "text": "from setuptools import setup, find_packages\n\nsetup(\n name='catalogbot',\n version='1.0',\n packages=find_packages(exclude=(\"tests\")),\n entry_points={'scrapy': ['settings = catalogbot.settings']},\n test_suite = 'nose.collector',\n)\n" }, { "alpha_fraction": 0.6529563069343567, "alphanum_fraction": 0.6529563069343567, "avg_line_length": 21.882352828979492, "blob_id": "84b9001fe976c2c9414406bb3b59994da9e3bfa4", "content_id": "96889136f9bcb074f608318da7f0e9e897e69f65", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 389, "license_type": "no_license", "max_line_length": 51, "num_lines": 17, "path": "/catalogbot/items.py", "repo_name": "stefaneng/catalogbot", "src_encoding": "UTF-8", "text": "# Define here the models for your scraped items\n#\n# See documentation in:\n# http://doc.scrapy.org/en/latest/topics/items.html\n\nfrom scrapy.item import Item, Field\n\nclass CourseItem(Item):\n description = Field()\n classname = Field()\n department = Field()\n number = Field()\n longname = Field()\n units = Field()\n prereqs = Field()\n coreqs = Field()\n prep = Field()\n" }, { "alpha_fraction": 0.5865384340286255, "alphanum_fraction": 0.5943047404289246, "avg_line_length": 24.037036895751953, "blob_id": "519a20bc97fdc05feb405fcde6de7d53d7d3de33", "content_id": "47d14d33958102e18b0f5b94a9808be70151e05f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2704, "license_type": "no_license", "max_line_length": 83, "num_lines": 108, "path": "/catalogbot/parsetools/courseparser.py", "repo_name": "stefaneng/catalogbot", "src_encoding": "UTF-8", "text": "\"\"\"\nAssists in parsing text from the course pages.\n\"\"\"\nimport re\nfrom itertools import chain\n\ndef parse_title(courseitem, title):\n \"\"\"\n Take a class title and divide into courseitem\n \"\"\"\n\n split_title = title.split(' ')\n\n if len(split_title) <= 1:\n return courseitem\n\n classname = \" \".join(split_title[:2]).strip('.')\n rest_title = \" \".join(split_title[2:])\n longname = rest_title.split('(')[0].strip()\n\n m = re.search(r'\\((.*)\\)', rest_title)\n if m:\n courseitem['units'] = m.group(1)\n\n (dep, number) = classname.split(' ')\n\n courseitem['classname'] = classname\n courseitem['department'] = dep\n courseitem['number'] = number\n courseitem['longname'] = longname\n\n return courseitem\n\ndef parse_comma_string(comma_string):\n components = comma_string.split(',')\n first = components[0]\n name = first.split(' ')[0]\n\n name_number_list = [first] + [''.join([name, comp]) for comp in components[1:]]\n\n return name_number_list\n\ndef parse_em(courseitem, req_name, em_string):\n m = re.search(r':(.*?)\\.', em_string)\n if m:\n preq_string = m.group(1).strip(' ')\n else:\n return courseitem\n\n prereqs = [parse_comma_string(c.strip(' '))\n for c in preq_string.split(';')]\n\n courseitem[req_name] = list(chain.from_iterable(prereqs))\n\n return courseitem\n\ndef parse_coreqs(courseitem, body):\n coreq_string = 
parse_coreqs_body(body)\n courseitem = parse_em(courseitem, 'coreqs', coreq_string)\n return courseitem\n\ndef parse_prereqs(courseitem, body):\n prereq_string = parse_prereqs_body(body)\n courseitem = parse_em(courseitem, 'prereqs', prereq_string)\n return courseitem\n\ndef parse_prep(courseitem, body):\n prep_string = parse_prep_body(body)\n courseitem = parse_em(courseitem, 'prep', prep_string)\n return courseitem\n\ndef parse_coreqs_body(body):\n m = re.search(r'Coreq[a-z]+:.*?\\.', body)\n if m is not None:\n return m.group(0)\n else:\n return ''\n\ndef parse_prep_body(body):\n m = re.search(r'Prep[a-z]+:.*?\\.', body)\n if m is not None:\n return m.group(0)\n else:\n return ''\n\n\ndef parse_prereqs_body(body):\n m = re.search(r'Prereq[a-z]+:.*?\\.', body)\n if m is not None:\n return m.group(0)\n else:\n return ''\n\ndef parse_body(courseitem, body):\n m = re.search(r'Prereq[a-z]+:.*?\\.\\ (.*)', body)\n if m is not None:\n body = m.group(1)\n\n m2 = re.search(r'Coreq[a-z]+:.*?\\.\\ (.*)', body)\n if m2 is not None:\n body = m2.group(1)\n\n m3 = re.search(r'Prep[a-z]+:.*?\\.\\ (.*)', body)\n if m3 is not None:\n body = m3.group(1)\n\n courseitem['description'] = body\n return courseitem\n" }, { "alpha_fraction": 0.7808219194412231, "alphanum_fraction": 0.7808219194412231, "avg_line_length": 57.400001525878906, "blob_id": "988a3e98e2b1828d551773d435f97e78dc8bdb64", "content_id": "5e72057344c2e8606a29379faff3f8ea8b0b5101", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 292, "license_type": "no_license", "max_line_length": 152, "num_lines": 5, "path": "/README.md", "repo_name": "stefaneng/catalogbot", "src_encoding": "UTF-8", "text": "# CatalogBot\n\n[![Build Status](https://travis-ci.org/stefaneng/catalogbot.svg?branch=master)](https://travis-ci.org/stefaneng/catalogbot)\n\n[![Coverage Status](https://coveralls.io/repos/stefaneng/catalogbot/badge.png?branch=master)](https://coveralls.io/r/stefaneng/catalogbot?branch=master)\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 17, "blob_id": "9da01cff76a7f08f697f24c4a08ecdcc16eac986", "content_id": "764b77a2b6bdf406231ad62f9c74d6d00a721e32", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 36, "license_type": "no_license", "max_line_length": 19, "num_lines": 2, "path": "/tests/example_test.py", "repo_name": "stefaneng/catalogbot", "src_encoding": "UTF-8", "text": "def example_test():\n assert True\n" }, { "alpha_fraction": 0.6578947305679321, "alphanum_fraction": 0.7105262875556946, "avg_line_length": 19.35714340209961, "blob_id": "7d4e024d40f7650b97684ee945450d0952d2a56d", "content_id": "19b659ab5cea07929d4cf846c4d26fa5f250ff27", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 570, "license_type": "no_license", "max_line_length": 135, "num_lines": 28, "path": "/catalogbot/settings.py", "repo_name": "stefaneng/catalogbot", "src_encoding": "UTF-8", "text": "# Scrapy settings for catalogbot project\n\nBOT_NAME = 'catalogbot'\n\nSPIDER_MODULES = ['catalogbot.spiders']\nNEWSPIDER_MODULE = 'catalogbot.spiders'\n\nITEM_PIPELINES = [\n 'scrapy_rethinkdb.RethinkDBPipeline',\n]\n\nUSER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36'\n\nDOWNLOAD_DELAY = 5\n\n#EXTENSIONS = {\n# 'catalogbot.extensions.storestats.storestats': 
500\n#}\n\nRETHINKDB_TABLE = 'catalog'\n\nRETHINKDB_CONNECTION = {\n 'db': 'catalog'\n}\n\nRETHINKDB_INSERT_OPTIONS = {\n 'upsert': True\n}\n" } ]
9
poderonIF/python2uri
https://github.com/poderonIF/python2uri
1f36e09f61dc9a1ec0fd93362c03ddb00a9915d1
a59fdf9662388e8dd90259df59769a2b62ad5d5e
0fede3d8a1d86fdf65e66d318b136f7b33d5db8e
refs/heads/master
2019-01-26T10:30:28.688158
2017-07-26T12:26:18
2017-07-26T12:26:18
97,974,378
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.807692289352417, "alphanum_fraction": 0.8125, "avg_line_length": 68.33333587646484, "blob_id": "98f3fc03f0c07d427f5d6a021f2f98b86626123c", "content_id": "b6dd04655f708cbd41d2d745c60b5a692a7feb35", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 214, "license_type": "no_license", "max_line_length": 130, "num_lines": 3, "path": "/README.md", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "# python2uri\nCódigo open-source dos exercícios da plataforma Uri Online Judge, todas devidamente comentadas com explicação detalhada do código.\nLembre-se, na incentivo a cola do código, e sim, o aprendizado.\n" }, { "alpha_fraction": 0.6867219805717468, "alphanum_fraction": 0.6929460763931274, "avg_line_length": 79.33333587646484, "blob_id": "72f69e8cbe9ddd36b8afcab161889f133d3e372f", "content_id": "33078afcdbc5c07991bb78286b076047b0997a1b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 492, "license_type": "no_license", "max_line_length": 127, "num_lines": 6, "path": "/1014.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nX = int(input(\"\")) # A variável 'X' irá armazenar o valor a ser inserido no input como inteiro (int).\nY = float(input(\"\")) # A variável 'Y' irá armazenar o valor a ser inserido no input como valor flutuante (float).\ntotal = X/Y # O consumo mensal será mostrado pela divisão da variável 'X' e 'Y' armazenadas na variável 'total'.\nprint (\"%.3f km/l\" % total) # Será printado o consumo mensal, sendo formatado por três casas decimais depois do ponto ('%.3f').\n" }, { "alpha_fraction": 0.6760247349739075, "alphanum_fraction": 0.7007299065589905, "avg_line_length": 83.80952453613281, "blob_id": "6bb3f648fee964153a1f3c4d036112b5d9994a1f", "content_id": "cacd20ea4fc4b6076aa6294da3e527aceaed4ce3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1831, "license_type": "no_license", "max_line_length": 217, "num_lines": 21, "path": "/1040.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "prova = raw_input().split(\" \") # função \"raw_input()\" retorna o valor digitado como string, função \"split()\" coloca variáveis na mesma linha,Exemplo: \" 5 6 7 8 \"\nval = [float(e) for e in prova] #função \"float\" transforma a variável em valor com ponto flutuante(valor decimal), função For(para) in (em), serve para imprimir números um abaixo do outro. \nn1,n2,n3,n4 = val #variável retona para a função \"Split()\" na primeira linha, coloca-se o nome da variável para o pyhton interpretar que está com a função split.\n\nMedia = ((n1*2)+(n2*3)+(n3*4)+(n4*1))/10 # calculo da média como pedido na questao 1040 uri.\n\nif Media >= 7.0: #função if diz que se a média for maior ou igual a sete(7.0). 
\n print (\"Media: %.1f\\nAluno aprovado.\" % Media) #imprima a media, com formatação de %0.1\" que está formatando para número inteiro, \"\\n\" significa que irá pular uma linha.\n \nelif Media >= 5.0 and Media < 6.9 : #junção de if e else, indicando que ainda temos mais uma condição para verificar antes do código executar, operador and(e) ajuda a colocar mais uma situação em sua condição,if,elif.\n print (\"Media: %.1f\\nAluno em exame.\" % Media)#imprima a média com um valor de formatação %0.1f\" significa que haverá valor decimal após a vírgula, ponto\n prova = float(raw_input())\n media = float((Media+prova)/2)\n if Media >= 5.0:\n print(\"Nota do exame: %.1f\\nAluno aprovado.\\nMedia final: %.1f\" % (prova,media))\n else: #condiçao Se não, verifica quando há algo de errado nas outras condições.\n print(\"Nota do exame: %.1f\\nAluno reprovado.\\nMedia final: %.1f\" % (prova,media)) #se o resultado não for o esperado, imprima Nota do exame, com a formatação de ponto flutante, e imprima Aluno reprovado. \n\n\nelse:\n print (\"Media: %.1f\\nAluno reprovado.\" % Media)\n" }, { "alpha_fraction": 0.6673865914344788, "alphanum_fraction": 0.6832253336906433, "avg_line_length": 80.70587921142578, "blob_id": "4078d8554a1247228a37cb69306ab7a26eaddf3a", "content_id": "3734e53d91f5b7cfa175aa8ecc2ae91332f42eae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1423, "license_type": "no_license", "max_line_length": 231, "num_lines": 17, "path": "/1036.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "import math# O comando import primeiro testa se o item está definido no pacote(função,variável,módulo) Aqui vamos usar uma função da biblioteca math do Python que nos retorna a raiz quadrada de um número.\n\nA,B,C = raw_input().split() #irá imprimir na tela os valores digitados nas variáveis, junto com o Split que irá deixa estes valores na mesma linha.\nA = float(A)#transforma a variável de inteiro, para numero decimal(Ponto flutuante).\nB = float(B)\nC = float(C)\nif (A != 0): # Usa-se o sinal \"!=\" para compararmos se um valor é diferente do outro.\n delta = (B * B) - (4 * A * C) # irá fazer a formula de Delta na matemática, multiplicando as variáveis.\n if (delta > 0): #verifica se a variável é maior que zero(0).\n x1 = (-B + math.sqrt(delta)) / (2 * A) #irá aplicar a formula de Baskhara, junto com a função math.sqrt(transforma a variavél \"delta\", em raiz quadrada).\n x2 = (-B - math.sqrt(delta)) / (2 * A)\n print(\"R1 = %.5f\" % x1) #imprime na tela o valor de R1, com a função \" %.5\" que irá colocar 5 casas decimais depois do ponto ou vírgula, e a função \" % x1\", irá transferir o resultado de x1 na linha 10, e irá imprimir aqui.\n print(\"R2 = %.5f\" % x2)\n elif (delta < 0): #junção de if e else, indica que temos mais uma condição para verificar antes de usar o Else.\n print(\"Impossivel calcular\")\nelse:\n print(\"Impossivel calcular\")\n" }, { "alpha_fraction": 0.6891191601753235, "alphanum_fraction": 0.6908462643623352, "avg_line_length": 95.16666412353516, "blob_id": "f965b89304a8ac15ab53af818a17b7a0c1a26cc3", "content_id": "1d2443027febeb36d7475cad410dee351a7bc918", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 591, "license_type": "no_license", "max_line_length": 150, "num_lines": 6, "path": "/1001.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\na = int(raw_input(\"\")) # Variável 'a' irá 
receber o valor a ser digitado no raw_input, que por sua vez foi convertido por um inteiro.\nb = raw_input(\"\") # Variável 'b' irá receber o valor a ser digitado no raw_input, que por sua vez será uma string, isto e, em caracteres.\nx = a + int(b) # Variável x irá receber o valor da soma da variável 'a' + variável 'b' convertida em inteiro. Será realizado a soma de ambos inteiros.\nprint(\"X = \"+str(x)) # Será printado o 'X =' seguido do valor agregado em '+str(x)', em que será convertido em string o inteiro. \n" }, { "alpha_fraction": 0.7105882167816162, "alphanum_fraction": 0.7164705991744995, "avg_line_length": 76.18181610107422, "blob_id": "2645f3ce4102ff61dc7dd22bb5bff012e9778ee4", "content_id": "2ca28fb1c72759f62e9236875db302e31f1d5215", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 873, "license_type": "no_license", "max_line_length": 198, "num_lines": 11, "path": "/1035.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "valores = raw_input().split(\" \") #irá pedir 4 valores, junto com o Split que separa uma string(caracteres) conforme um delimitador.\na, b, c, d = valores #este é o delimitador, coloca-se a variável da primeira linha para identificar que está usando a função Split.\n\nif (int(b) > int(c) and int(d) > int(a) and (int(c) + int(d)) > (int(a) + int(b)) and int(c) >= 0 and int(d) >= 0 and int(a)%2==0):\n print(\"Valores aceitos\") \n\n# a função if, irá verificar se cada variável é maior somando algumas das mesmas, junto com a função int(que irá transformar o valor para inteiro), no final irá verificar se é maior ou igual a zero.\n# o print irá escrever na tela se os valores são aceitos.\n\nelse:#serve para dizer, se não os valores testados na função if, não forem aceitos:\n print(\"Valores nao aceitos\")#escreva na tela que os valores não forem aceitos.\n\n" }, { "alpha_fraction": 0.6207792162895203, "alphanum_fraction": 0.6740259528160095, "avg_line_length": 63.16666793823242, "blob_id": "3a35e7915feaaa616511e48f7b7d87f18743f25f", "content_id": "c02f2a367e7ac68a85ccc63a92e9c18b6759bf88", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 778, "license_type": "no_license", "max_line_length": 132, "num_lines": 12, "path": "/1037.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "value = float(input()) #pede para digitar o valor, junto com o float que irá transformar numero inteiro, para número decimal. 
\n\nif (value >= 0) and (value <= 25): #verifica se o valor digitado é maior ou igual a zero, and(e) , se o valor é menor ou igual a 25.\n print(\"Intervalo [0,25]\") #imprime na tela o intervalo \" 0,25\"\nelif (value > 25) and (value <= 50): #verifica se o valor digitado é maior ou igual a 25 and(e), se o valor é menor ou igual a 50.\n print(\"Intervalo (25,50]\")\nelif (value > 50) and (value <= 75):\n print(\"Intervalo (50,75]\")\nelif (value > 75) and (value <= 100):\n print(\"Intervalo (75,100]\")\nelse: #se todas as funções forem verificadas e nenhuma der certo.\n print(\"Fora de intervalo\") # ele imprimir na tela \"Fora de intervalo\".\n" }, { "alpha_fraction": 0.7037914395332336, "alphanum_fraction": 0.7037914395332336, "avg_line_length": 69.33333587646484, "blob_id": "3e280003e16ec93c0e996d7369aca84c07147f7e", "content_id": "f2131cbdf45fc9ec67dd5e8de8dbd428746b21ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 435, "license_type": "no_license", "max_line_length": 172, "num_lines": 6, "path": "/1003.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "A = int(input()) # O comando int, serve para deixar o número da variável em forma inteira, input() retorna o valor como inteiro.\nB = int(input())\n\nSOMA = A + B # Como na matemática básica, no python pode-se somar números com o modulo de sinal \"+\".\n\nprint \"SOMA = %d\" %SOMA # A função \"Print\" imprime na tela o que você escreveu, \"%d% formata o número para Decimal e inteiro, pode-se usar também \"%i\" que é a mesma função.\n" }, { "alpha_fraction": 0.698305070400238, "alphanum_fraction": 0.7135593295097351, "avg_line_length": 97.33333587646484, "blob_id": "a18b13dfc1b704fba53f19c5540352b43b835e0d", "content_id": "df0e529b4313439e6829686b9f6decaeb0da52b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 603, "license_type": "no_license", "max_line_length": 205, "num_lines": 6, "path": "/1004.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nn1 = input(\"\") # A variável 'n1' irá armazenar o valor a ser inserido pelo input, lembrando que input nativamente recebe apenas numeros.\nn2 = input(\"\") # A variável 'n2' irá armazenar o valor a ser inserido pelo input.\nPROD = n1 * n2 # A variável PROD irá armazenar o produto da variável 'n1' e 'n2', isto é, ambas variáveis serão multiplicadas e armazenada.\nprint \"PROD = %d\" % (PROD) # Será printada o resultado do produto na tela, em que é utilizado o formatador de inteiro '%d' na string, posteriormente o formatador irá receber o valor de 'PROD' - '% (PROD)'.\n" }, { "alpha_fraction": 0.636484682559967, "alphanum_fraction": 0.6657789349555969, "avg_line_length": 52.64285659790039, "blob_id": "2bb50f67c9e00903080381d11b74ccc1097deeab", "content_id": "18dd38681472cb31788352fc6b3e630379cdbef2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 770, "license_type": "no_license", "max_line_length": 146, "num_lines": 14, "path": "/1038.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "cod,qty= raw_input().split() #irá pedir as o número das variáveis, junto com a função Split, que irá deixa-las na mesma linha,( exemplo: 5 6 7 8 )\ncod = int (cod) #transforma o número digitado na linha acima em número inteiro.\nqty = int (qty)\nif (cod == 1): # “==” Utilizamos os dois sinais de igual para compararmos se um valor é 
igual ao outro.\n total = qty * 4 #cria uma variável \"total\" e adiciona uma variável e multiplica pelo preço que está na questão 1038 Uri.\nelif (cod == 2):\n total = qty * 4.5\nelif (cod == 3):\n total = qty * 5\nelif (cod == 4):\n total = qty * 2\nelif (cod == 5):\n total = qty * 1.5\nprint ( \"Total: R$% .2f\" % total) #imprime o valor \"Total\", transformando em Reais e adicionando 2 casas após o ponto decimal.\n" }, { "alpha_fraction": 0.6399999856948853, "alphanum_fraction": 0.6675862073898315, "avg_line_length": 35.25, "blob_id": "11257a3f4e9bd54fe656858da8fb594c24c86501", "content_id": "17e125678df97cbe9d9366bc562a8f05451a73ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 725, "license_type": "no_license", "max_line_length": 169, "num_lines": 20, "path": "/1060.py", "repo_name": "poderonIF/python2uri", "src_encoding": "UTF-8", "text": "a=input() #Le 6 valores numericos, sendo a,b,c,d,e,f\nb=input() \nc=input()\nd=input()\ne=input()\nf=input()\ncont=0 #Define o valor de uma variavel cont(contador) como 0, que vai definir o numero de valores positivos\nif a>0: #Aqui ele testa todas as variaveis lidas(a,b,c,d,e,f) e testa se elas sao maior que 0(positivo)\n cont+=1 #Se sim, cont sera somada com 1(cont+=1 e a mesma coisa que cont=cont+1), ou seja, o numero de positivos aumenta em 1\nif b>0:\n cont+=1\nif c>0:\n cont+=1\nif d>0:\n cont+=1\nif e>0:\n cont+=1\nif f>0:\n cont+=1\nprint \"%d valores positivos\" % (cont) #Aqui o \"%d\" e substituido pelo que esta dentro do parentese, no caso a variavel cont, como exemplo assim [\"Exemplo\" % (variavel)\"]\n" } ]
11
walterjrbr/python
https://github.com/walterjrbr/python
edc20ddd3fe56eebdd4be56f22a1413286013e78
d8c26baafbfc0ab57f3dcfac0baada22675f85be
ad66f07afde11fffb16f5d76b91628d6e028c899
refs/heads/master
2021-09-16T12:37:43.291093
2018-06-20T19:41:05
2018-06-20T19:41:05
107,187,653
2
0
null
null
null
null
null
[ { "alpha_fraction": 0.7884615659713745, "alphanum_fraction": 0.7884615659713745, "avg_line_length": 16.33333396911621, "blob_id": "724670bf16448677cd3167c3cf61421b10c04dde", "content_id": "d2cf4dc533a9906035ebeebff5ac9625b1a0fe7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 52, "license_type": "no_license", "max_line_length": 29, "num_lines": 3, "path": "/README.md", "repo_name": "walterjrbr/python", "src_encoding": "UTF-8", "text": "# python\ndev big data\nUsado para testes com Jenkins\n" }, { "alpha_fraction": 0.625, "alphanum_fraction": 0.6824324131011963, "avg_line_length": 28.399999618530273, "blob_id": "f73e01028f5b724f57f66438e880242006e37753", "content_id": "bb7a8ec498106dfbd5ab2248efd036493057d53b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 296, "license_type": "no_license", "max_line_length": 50, "num_lines": 10, "path": "/teste1.py", "repo_name": "walterjrbr/python", "src_encoding": "UTF-8", "text": "arquivo = open(\"/home/walter/log.txt\",\"r\")\npesquisa = raw_input(\"Digite o termo procurado: \")\n#teste print ipesquisa=\"teste55555555555555\"\nprint (pesquisa_teste_ok_08)\nteste\n#for i in arquivo.readlines():\n\t #print i\n #if i.count(teste) > 0:\n # print i\n#print \"nao encontrei o padrao\"\n\n\n" }, { "alpha_fraction": 0.5930232405662537, "alphanum_fraction": 0.6744186282157898, "avg_line_length": 20.5, "blob_id": "e917015349e3fde4da2e2421318c01a31020762f", "content_id": "a58b56ce6777ccd932c8dbea63544c505bcaa92a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 86, "license_type": "no_license", "max_line_length": 31, "num_lines": 4, "path": "/graf2.py", "repo_name": "walterjrbr/python", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\nplt.plot( [10,5,3,4,6,8] )\nplt.title(\"OK\")\nplt.show()\n" } ]
3
Adoliin/bacweb-scraper
https://github.com/Adoliin/bacweb-scraper
2e50e218a862c47be7aba1a84501394458933b19
f00c1213340b1cee88f459f66563d362f72d6623
89b1174cb166ffccbb101506c031dd6fd60c2fd0
refs/heads/master
2023-02-20T22:31:26.979268
2021-01-17T16:30:05
2021-01-17T16:30:05
291,565,496
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.6032324433326721, "alphanum_fraction": 0.6116548776626587, "avg_line_length": 29.089040756225586, "blob_id": "ea6c9ecdcfdb1c640d06ef3ad6456d0c146db3be", "content_id": "62ffccbadcb54fc95a8cc0760bcc4e3e656782dd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4398, "license_type": "permissive", "max_line_length": 81, "num_lines": 146, "path": "/main.py", "repo_name": "Adoliin/bacweb-scraper", "src_encoding": "UTF-8", "text": "import os\nimport requests\nimport bs4\nimport sys\narguments = sys.argv\n\n# -- GLOBAL VARIABLES --\noptionList = [\n 'Allemand',\n 'Espagnol',\n 'Russe',\n 'chinois',\n 'Turque',\n 'Italien',\n 'Ã\\x89ducation Musicale',\n 'Arts & Plastiques',\n 'Théâtre'\n]\nsections_g = [\n 'math',\n 'science',\n 'economie',\n 'technique',\n 'lettres',\n 'sport',\n 'info',\n]\nbacDir = os.path.join(os.getcwd(),'bac')\n\ndef main():\n sectionNum = menu()\n subjectList = getSubjectList()\n if sectionNum == 8:\n for i in range(7):\n getSection(subjectList, i+1)\n else:\n getSection(subjectList, sectionNum)\n\ndef menu():\n print('Choose section(s) to download:')\n print('[1] Math')\n print('[2] Science')\n print('[3] Economie')\n print('[4] Technique')\n print('[5] Lettres')\n print('[6] Sport')\n print('[7] Info')\n print('[8] ALL')\n while True:\n ans = input('--> ')\n if ans in ['1', '2', '3', '4', '5', '6', '7', '8']:\n return int(ans)\n else:\n print('You must pick a number from the menu!')\n continue\n\ndef getSubjectList():\n mainPageSource = requests.get('http://www.bacweb.tn/section.htm')\n soup = bs4.BeautifulSoup(mainPageSource.text, 'lxml')\n return soup.find_all('tbody')[0].find_all('tr')\n\ndef getProjectDir(section):\n #create bac folder if it dosent exist and chdir into it\n projectDir = os.path.join(bacDir, f'bac-{section}')\n if os.path.exists(projectDir) == False :\n os.makedirs(projectDir)\n os.chdir(projectDir)\n return projectDir\n\ndef getSection(subjectList, sectionNum):\n sectionName = sections_g[sectionNum-1]\n global projectDir\n projectDir = getProjectDir(sectionName)\n print(f'\\n~~~Downloading \"{sectionName}\" section:~~~')\n for subject in subjectList:\n sectionList = subject.find_all('td')\n try:\n subjectName = sectionList[0].text\n except:\n pass\n else:\n sectionSubject = sectionList[sectionNum].select('a')\n if len(sectionSubject) != 0:\n linkToSubject = 'http://www.bacweb.tn/'+sectionSubject[0]['href']\n if subjectName in optionList:\n # print('OPTION : '+subjectName)\n pass\n else:\n getSubject(linkToSubject, subjectName)\n\ndef getSubject(linkToSubject, subjectName):\n print(f'Downloading all of \"{subjectName}\" exams of current section.')\n subjectPageSource = requests.get(linkToSubject)\n soup = bs4.BeautifulSoup(subjectPageSource.text, 'lxml')\n yearsList = soup.find_all('tr')\n for year in yearsList:\n subjectsByYear = year.find_all('td')\n try:\n yearNumber = int(subjectsByYear[0].text)\n except:\n pass\n else:\n getYear(yearNumber, subjectsByYear)\n\ndef getYear(yearNumber, subjectsByYear):\n yearNumberDir = os.path.join(projectDir, str(yearNumber))\n if os.path.exists(yearNumberDir) == False :\n os.makedirs(yearNumberDir)\n os.chdir(yearNumberDir)\n\n sessionDir_P = os.path.join(yearNumberDir, 'principale')\n if os.path.exists(sessionDir_P) == False :\n os.makedirs(sessionDir_P)\n\n sessionDir_C = os.path.join(yearNumberDir, 'controle')\n if os.path.exists(sessionDir_C) == False :\n os.makedirs(sessionDir_C)\n\n principale_sujet = 
subjectsByYear[1].find_all('a')\n getSujet(principale_sujet, yearNumberDir, 'principale')\n\n principale_corrige = subjectsByYear[2].find_all('a')\n getSujet(principale_corrige, yearNumberDir, 'principale')\n\n controle_sujet = subjectsByYear[3].find_all('a')\n getSujet(controle_sujet, yearNumberDir, 'controle')\n\n controle_corrige = subjectsByYear[4].find_all('a')\n getSujet(controle_corrige, yearNumberDir, 'controle')\n\n os.chdir(projectDir)\n\ndef getSujet(sujet, yearNumberDir, promotion):\n if len(sujet) != 0:\n sujetLink = 'http://www.bacweb.tn/'+sujet[0]['href']\n p = sujetLink.rindex('/')\n sujetName = sujetLink[p+1:]\n promotionDir = os.path.join(yearNumberDir, promotion)\n os.chdir(promotionDir)\n sujetDir = os.path.join(promotionDir, sujetName)\n if os.path.exists(sujetDir) == False:\n os.system(f'wget \"{sujetLink}\" &> /dev/null')\n os.chdir(projectDir)\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.7645502686500549, "alphanum_fraction": 0.7680776119232178, "avg_line_length": 28.842105865478516, "blob_id": "f75c450d1098c6e31d3ff96edc165bd1f61f802e", "content_id": "467df27aa0abbd3a49fb5091e73e8c8d731aa1e2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1134, "license_type": "permissive", "max_line_length": 98, "num_lines": 38, "path": "/README.md", "repo_name": "Adoliin/bacweb-scraper", "src_encoding": "UTF-8", "text": "# Bacweb Scraper\nA simple python program to download all tests of the \"Tunisian Baccalaureate\"\nof all available sections.\n## Getting Started\n\n### Prerequisites\nAs specified in the \"pyproject.toml\", these are the modules dependencies for this project:\n- python 3.9\n- requests\n- bs4\n- lxml\n\nExternal dependency:\n- wget (for downloading the tests PDFs)\n \n### Installing\nTo install it just do so by pressing the \"Code\" button and then press \"Download ZIP\".\nOr by git cloning this repository like this:\n`git clone https://github.com/adoliin/bacweb-scraper`\n\n## Usage\nFrom GUI:\nGo to the project folder and press the \"main.py\" file\n\nFrom command line:\nGo to the project directory to the:\n`cd bacweb-parser`\nAnd execute the program:\n`python main.py`\n\nAfter executing the program, a menu will appear and you can\ntype the number of the section you want to download the tests from or\ntype \"8\" to download all available tests from all sections.\nThe program will then create a \"bac\" directory containing all the tests ordered\nby year.\n\n## License\nThis project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details\n" } ]
2
Park-Mi-Ran/test
https://github.com/Park-Mi-Ran/test
9036e0ffa0039c9f60c767a8b1ada198ad99a8e6
8a77caaf8a0b2e91e81fd56ebbd9ba6b993b8b03
728ade93baacba396135a2914b5b570beefddd84
refs/heads/master
2023-07-24T08:47:55.812571
2021-08-13T10:23:32
2021-08-13T10:23:32
395,610,064
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5431472063064575, "alphanum_fraction": 0.5913705825805664, "avg_line_length": 20.88888931274414, "blob_id": "8f3dc7902f6e0310b306a89071816392e7a905fb", "content_id": "42d4051ff81c9ce7b7a76ba5b91a5351d2458f08", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 394, "license_type": "no_license", "max_line_length": 51, "num_lines": 18, "path": "/onlineclub/post/migrations/0007_clubs_club_detail_id.py", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-08-12 19:47\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('post', '0006_clubs_recruitment_content'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='clubs',\n name='club_detail_id',\n field=models.IntegerField(null=True),\n ),\n ]\n" }, { "alpha_fraction": 0.8163265585899353, "alphanum_fraction": 0.8163265585899353, "avg_line_length": 28.600000381469727, "blob_id": "9efd4694cdf4f454ac41b0fa91207fac6b2ff98b", "content_id": "3d86cb5540cc94b05762154adf1c0736e176be36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 147, "license_type": "no_license", "max_line_length": 32, "num_lines": 5, "path": "/onlineclub/post/admin.py", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "from django.contrib import admin\nfrom post.models import *\n# Register your models here.\nadmin.site.register(Posts)\nadmin.site.register(Recruitment)" }, { "alpha_fraction": 0.6066328883171082, "alphanum_fraction": 0.6269000172615051, "avg_line_length": 24.541175842285156, "blob_id": "eb39374f8c073be6806dec2c73beea963233460c", "content_id": "84524478b648de6e369afaeaa174b36e5f7d4526", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3511, "license_type": "no_license", "max_line_length": 85, "num_lines": 85, "path": "/readme.md", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "동아리 박람회를 대체하기 위해 동아리 연합회를 진행할 예정입니다. \n\n- 백엔드 : 박미란, 박현서, 이지수, 최정은, 함승우, 황한슬\n- 프론트엔드 : 김지연, 박세연, 이민주, 이보람, 주미진 (보조: 박미란, 성예지)\n- 디자인 : 윤예빈 (보조: 성예지)\n\n# 세팅 방법\ngit clone을 합니다. \n```bash\ngit clone https://github.com/SYULION9TH/2021-syu-club.git\n```\n## github branch 설계 규칙\n### Branch 확인 하기\n### backend와 frontend로 banch를 나눠놨으니 본인이 해당하는 브랜치에 들어가 본인의 브랜치 생성하기 \n\n1. 현재 내가 위치한 Branch 확인\n - `$git branch`\n2. 원격 저장소의 브랜치 확인\n - `$git branch -r`\n3. 브랜치의 마지막 커밋 메세지 확인\n - `$git branch -v`\n\n### Branch 생성 및 이동\n1. Branch 생성하기\n - git branch 브랜치명\n - `$git branch test`\n2. 생성한 Branch로 이동하기\n - git checkout 브랜치명\n - `$git checkout test`\n\n### Branch 삭제\ngit branch -d 브랜치명 \n`$git branch -d test`\n\n### branch 병합 Git Merge\n`$git merge 브랜치명`\n\n### master branch로 이동\n`$git checkout master`\n\n# 개발 시작합시다. \n\n`2021-syu-club/onlineclub`폴더가 있는 위치에서 가상환경(`$source myvenv/bin/activate`)을 실행해 줍니다. \n가상환경 실행 후 `requirements.txt`가 있는 위치에서 `$pip install -r requirements.txt`명령어를 입력합니다. \n`$python manage.py runserver` 이후 `127.0.0.1:8000/admin`으로 들어가서 제대로 되는지 확인합니다.\n\n```text\n※python manage.py migrate는 하지마세요※ \nid : dev \npassword : 1234 \n``` \n\n## 동아리 앱 폴더:\n- 동아리 - 황한슬, 박미란, 박현서\n - 박미란 : 모집요강, 활동사진 수정\n - 황한슬 : 동아리 목록 정렬(디데이, 동아리 순위, 랜덤)\n - 박현서 : 동아리 검색 (메인페이지, 사이드바)\n - models = ClubTypes, Clubs\n- 게시물 - 최정은\n - 게시글 생성, 수정, 삭제\n - models = Posts\n- account -함승우\n - 관리자 로그인\n - models - AuthUser\n\n## URL 설계\n1. 자원의 컬렉션 이름으로는 복수형을 쓴다. 
ex) `/Post/1 -> /posts/1`\n2. http의 Method가 들어가서는 안된다.\n3. 동사표현을 쓰면 안된다. ex) `/posts/show/1 -> GET /posts/1`\n4. 경로 중 변하는 값은 유일한 값으로 바꾼다. ex) id가 12인 게시물을 지우는 행위 `DELETE /posts/12`\n5. '/'는 계층관계를 나타내는데 사용한다.\n6. URI 마지막 문자로 슬래시(/ )를 포함하지 않는다.\n7. 대문자는 쓰지 않고 소문자만 쓴다.\n8. 하이픈(- )은 URI 가독성을 높이는데 사용 불가피하게 긴 URI경로를 사용하게 된다면 하이픈을 사용해 가독성을 높인다.\n9. 밑줄(\\_ )은 URI에 사용하지 않는다. 밑줄은 보기 어렵거나 밑줄 때문에 문자가 가려지기도 하므로 가독성을 위해 밑줄은 사용하지 않는다.\n10. 리소스 간에 연관 관계가 있는 경우 ex) 리소스명/{리소스ID}/관계가 있는 다른 리소스 명 --> `posts/1/comments`\n\nQnA 예시)\n\n|설명|Method|경로|\n|----|-------|----|\n|한 동아리의 QnA목록을 나타낸다.|GET|/clubs/:id/qna|\n|한 동아리의 QnA상세를 나타낸다.|GET|/clubs/:id/qna/:id|\n|한 동아리의 QnA를 수정한다.|PUT|/clubs/:id/qna/:id|\n|한 동아리의 QnA를 삭제한다.|DELETE|/clubs/:id/qna/:id|\n" }, { "alpha_fraction": 0.6862302422523499, "alphanum_fraction": 0.6941308975219727, "avg_line_length": 29.586206436157227, "blob_id": "379299dd9a8d46dfdb038aea696ed0752feee9e3", "content_id": "4311f88313f4c26f0f6c42d1b9a63bd9b51b1fcf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 924, "license_type": "no_license", "max_line_length": 90, "num_lines": 29, "path": "/onlineclub/post/views/postView.py", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.shortcuts import render, redirect, get_object_or_404\nfrom django.utils import timezone\nfrom post.models import *\n\n\ndef home(request):\n posts = Posts.objects.all()\n return render(request, 'home.html', {'posts':posts})\n\ndef detail(request, id):\n club = get_object_or_404(Clubs, club_id=id)\n return render(request, 'detail.html', {'club':club})\n\ndef new(request):\n return render(request, 'new.html')\n\ndef create(request):\n new_club= Clubs()\n new_club.club_desc = request.POST.get('club_desc','설명을 입력해주세요')\n new_club.recruitment_content = request.POST.get('recruitment_content','모집요강 작성 란입니다.')\n new_club.save()\n return redirect('detail', str(new_club.club_id))\n\ndef delete(request, id):\n delete_post = Posts.objects.get(post_id=id)\n delete_post.is_deleted = 1\n delete_post.delete()\n return redirect('home')" }, { "alpha_fraction": 0.6813627481460571, "alphanum_fraction": 0.6813627481460571, "avg_line_length": 37.46154022216797, "blob_id": "f1339318a62b542c4a3486990edad6d5e75c448f", "content_id": "02ade418bd3f7094b2ea17f2080075364ed3d009", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 499, "license_type": "no_license", "max_line_length": 100, "num_lines": 13, "path": "/onlineclub/post/urls.py", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "from django.contrib import admin\nfrom django.urls import path, include\nfrom post.views import postView, postEditView\n\nurlpatterns = [\n path('', postEditView.club, name=\"home\"),\n path('<int:id>', postView.detail, name=\"detail\"),\n path('new/', postView.new, name=\"new\"),\n path('create/', postView.create, name=\"create\"),\n path('<int:id>/delete', postView.delete, name=\"delete\"),\n path('<int:id>/edit/<int:club_detail_id>', postEditView.detailpage_update, name=\"detailupdate\"),\n \n]" }, { "alpha_fraction": 0.5130208134651184, "alphanum_fraction": 0.5295138955116272, "avg_line_length": 25.79069709777832, "blob_id": "643bec32c70cc9df3c78e6c77e4a5334c43f10f7", "content_id": "df02e9ad81eb98acbd5eed26be2cba553f69d05c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1152, "license_type": "no_license", 
"max_line_length": 50, "num_lines": 43, "path": "/onlineclub/post/migrations/0004_auto_20210813_0406.py", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-08-12 19:06\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('post', '0003_alter_posts_user'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='clubs',\n name='created_at',\n field=models.DateTimeField(null=True),\n ),\n migrations.AlterField(\n model_name='clubs',\n name='deadline',\n field=models.IntegerField(null=True),\n ),\n migrations.AlterField(\n model_name='clubs',\n name='end_day',\n field=models.DateTimeField(null=True),\n ),\n migrations.AlterField(\n model_name='clubs',\n name='established',\n field=models.DateTimeField(null=True),\n ),\n migrations.AlterField(\n model_name='clubs',\n name='rank',\n field=models.IntegerField(null=True),\n ),\n migrations.AlterField(\n model_name='clubs',\n name='updated_at',\n field=models.DateTimeField(null=True),\n ),\n ]\n" }, { "alpha_fraction": 0.6734693646430969, "alphanum_fraction": 0.6757369637489319, "avg_line_length": 33.78947448730469, "blob_id": "4a4ef5f0d9c950f8f046d73e3e1649840655fe71", "content_id": "729111bbb8c12cf5a7866f9561794e16b8eb7ea5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1363, "license_type": "no_license", "max_line_length": 95, "num_lines": 38, "path": "/onlineclub/post/views/postEditView.py", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, redirect, get_object_or_404\nfrom django.utils import timezone\nfrom post.models import *\n\n\n# def post_edit(request, id):\n# edit_post = Posts.objects.get(post_id=id)\n# return render(request, 'post/edit.html', {'post':edit_post})\n\n# def post_update(request, id):\n# update_post = Posts.objects.get(post_id=id)\n# update_post.post_title = request.POST['post_title']\n# update_post.post_content = request.POST['post_content']\n# update_post.post_img_url = request.POST['post_img_url']\n# update_post.updated_at = timezone.now()\n# update_post.save()\n# return redirect('detail', str(update_post.post_id))\n\n\ndef club(request):\n #동아리 이름과 동아리 한줄 설명 가져오기 위해서\n club = Clubs.objects.all()\n return render(request, 'home.html', {'club':club})\n\n\n\ndef detailpage_update(request, id, pk):\n update_detailpage = Clubs.objects.get(club_id=id, club_detail_id=pk)\n\n if request.method ==\"POST\":\n \n update_detailpage.club_desc = request.POST.get('club_desc')\n update_detailpage.recruitment_content = request.POST.get('recruitment_content')\n update_detailpage.save()\n return redirect('detail' , str(update_detailpage.club_id))\n\n else:\n return render(request,'edit.html',{'club':club}, str(update_detailpage.club_detail_id))\n\n" }, { "alpha_fraction": 0.5400522947311401, "alphanum_fraction": 0.5540765523910522, "avg_line_length": 46.2696647644043, "blob_id": "4bc03d6e70cb75d941074cf2ad8e4dde4c1f52d7", "content_id": "7cb02ab2d0c8c1a17608aa9f001578c223499925", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4207, "license_type": "no_license", "max_line_length": 139, "num_lines": 89, "path": "/onlineclub/post/migrations/0001_initial.py", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-08-10 18:38\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass 
Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='AuthUser',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('password', models.CharField(max_length=128)),\n ('is_superuser', models.IntegerField()),\n ('username', models.CharField(max_length=150, unique=True)),\n ('email', models.CharField(max_length=254)),\n ('is_staff', models.IntegerField()),\n ('is_active', models.IntegerField()),\n ],\n ),\n migrations.CreateModel(\n name='Clubs',\n fields=[\n ('club_id', models.AutoField(primary_key=True, serialize=False)),\n ('club_name', models.CharField(blank=True, max_length=200, null=True)),\n ('club_desc', models.CharField(blank=True, max_length=200, null=True)),\n ('club_img_url', models.CharField(blank=True, max_length=500, null=True)),\n ('club_logo_url', models.CharField(blank=True, max_length=500, null=True)),\n ('established', models.DateTimeField()),\n ('created_at', models.DateTimeField()),\n ('updated_at', models.DateTimeField()),\n ('end_day', models.DateTimeField()),\n ('deadline', models.IntegerField()),\n ('rank', models.IntegerField()),\n ('sns_link', models.CharField(blank=True, max_length=500, null=True)),\n ('form_link', models.CharField(blank=True, max_length=500, null=True)),\n ],\n ),\n migrations.CreateModel(\n name='ClubTypes',\n fields=[\n ('club_type_id', models.AutoField(primary_key=True, serialize=False)),\n ('club_type_name', models.IntegerField(blank=True, null=True)),\n ('club_type_desc', models.CharField(blank=True, max_length=200, null=True)),\n ('club_type', models.IntegerField(blank=True, null=True)),\n ],\n ),\n migrations.CreateModel(\n name='Recruitment',\n fields=[\n ('recruitment_id', models.AutoField(primary_key=True, serialize=False)),\n ('recruitment_content', models.TextField(blank=True, null=True)),\n ('start_date', models.DateTimeField()),\n ('end_date', models.DateTimeField()),\n ('is_staff', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='post.authuser')),\n ],\n ),\n migrations.CreateModel(\n name='Posts',\n fields=[\n ('post_id', models.AutoField(primary_key=True, serialize=False)),\n ('post_title', models.CharField(max_length=150)),\n ('post_content', models.CharField(max_length=3000)),\n ('post_introduce', models.CharField(blank=True, max_length=200, null=True)),\n ('post_img_url', models.CharField(blank=True, max_length=1500, null=True)),\n ('created_at', models.DateTimeField()),\n ('updated_at', models.DateTimeField()),\n ('is_deleted', models.IntegerField()),\n ('club', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='post.clubs')),\n ('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='post.authuser')),\n ],\n ),\n migrations.AddField(\n model_name='clubs',\n name='club_type',\n field=models.OneToOneField(on_delete=django.db.models.deletion.DO_NOTHING, to='post.clubtypes'),\n ),\n migrations.AddField(\n model_name='clubs',\n name='user',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='post.authuser'),\n ),\n ]\n" }, { "alpha_fraction": 0.590062141418457, "alphanum_fraction": 0.6293995976448059, "avg_line_length": 24.421052932739258, "blob_id": "7ad09dcdeb85fb908261d7ac13085a0875efe7b4", "content_id": "97a0f29d4420c5f65d501c1c2f41a36494f35750", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 483, "license_type": "no_license", "max_line_length": 115, "num_lines": 19, "path": "/onlineclub/post/migrations/0003_alter_posts_user.py", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-08-10 19:07\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('post', '0002_alter_authuser_username'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='posts',\n name='user',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='post.authuser'),\n ),\n ]\n" }, { "alpha_fraction": 0.5667350888252258, "alphanum_fraction": 0.6303901672363281, "avg_line_length": 24.63157844543457, "blob_id": "375a7245fcb1ccd89a3660bca9550f38eba4a735", "content_id": "7ddcec51901b4f2fae86a0626795f22c0e47ab73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 487, "license_type": "no_license", "max_line_length": 119, "num_lines": 19, "path": "/onlineclub/post/migrations/0005_alter_clubs_club_type.py", "repo_name": "Park-Mi-Ran/test", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-08-12 19:07\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('post', '0004_auto_20210813_0406'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='clubs',\n name='club_type',\n field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='post.clubtypes'),\n ),\n ]\n" } ]
10
hollandlive/php5FrameWork
https://github.com/hollandlive/php5FrameWork
a65bed2ff96ef6680e0926b562aa2b10b85a8941
a143bc2cb05e9a0ad4eebf3100f6c4562e1d39ac
02213d50196cdb407feafb6bbd3183baa8c975d7
refs/heads/master
2021-01-23T20:55:04.891184
2015-02-05T11:23:49
2015-02-05T11:23:49
27,922,349
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5352112650871277, "alphanum_fraction": 0.577464759349823, "avg_line_length": 11, "blob_id": "d62ba1fd4fb8c8c25e4d0505cab49d7d339e443e", "content_id": "79b2454092fa74d1ef7ab508e4b29c1b6e8bd7dc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 71, "license_type": "no_license", "max_line_length": 20, "num_lines": 6, "path": "/pr1.py", "repo_name": "hollandlive/php5FrameWork", "src_encoding": "UTF-8", "text": "print \"Hello, Artem\"\nn = 5\nwhile n > 0:\n\tprint n\n\tn = n - 1\nprint 'wow'" }, { "alpha_fraction": 0.4285714328289032, "alphanum_fraction": 0.4285714328289032, "avg_line_length": 4.5, "blob_id": "3c92b5de8de6304976ef2bfbb37dc73ded797428", "content_id": "97bc865ba840c262d07e587c899b501ca426b656", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 21, "license_type": "no_license", "max_line_length": 6, "num_lines": 4, "path": "/library/wrapper.class.php", "repo_name": "hollandlive/php5FrameWork", "src_encoding": "UTF-8", "text": "<?php\n?>\n<nav>\n</nav>" }, { "alpha_fraction": 0.6173912882804871, "alphanum_fraction": 0.626086950302124, "avg_line_length": 21.29032325744629, "blob_id": "ef5d38be7ff6805aa2cdfc3f51bb7679fba02ce9", "content_id": "4d106161a5545f6ceead2d637bf74acf7d232f11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 695, "license_type": "no_license", "max_line_length": 71, "num_lines": 31, "path": "/indexTemp.php", "repo_name": "hollandlive/php5FrameWork", "src_encoding": "UTF-8", "text": "<?php\nsession_start();\nheader('Content-Type: text/html; charset=utf-8');\n?>\n\n<!-- html with php files included -->\n\n<!-- head -->\n<!DOCTYPE html>\n<html lang=\"ru\">\n<head>\n\t\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\" />\n\t<title>Веб-ТВ</title>\n\t<link href=\"css/bootstrap.css\" rel=\"stylesheet\" />\n\t<link rel=\"stylesheet\" type=\"text/css\" href=\"style.css\" />\n\t<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n\n\t<!-- [if IE] -->\n\t<script src=\"http://html5shiv.google.com/svn/trunk/html5.js\"></script>\n</head>\n\n<!-- body -->\n\t<body>\n\t\n<!-- wrapper -->\n\t\t<div id=\"wrapper\">\n\t\t\t<?php require_once 'library/wrapper.class.php'; ?>\n\t\t</div>\n\t</body>\n</html>" }, { "alpha_fraction": 0.6823821067810059, "alphanum_fraction": 0.6947891116142273, "avg_line_length": 30.076923370361328, "blob_id": "c879941d99a5a5aaacb9bead6087ba11a16204c9", "content_id": "d432cfd731c7be92a6e69491226fc40d6792b6af", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 403, "license_type": "no_license", "max_line_length": 81, "num_lines": 13, "path": "/index.php", "repo_name": "hollandlive/php5FrameWork", "src_encoding": "UTF-8", "text": "<?php\nrequire_once 'library/index.class.php';\n$moduleA1 = new Module('This is usual Module');\n$moduleA2 = new Featured_Module('THIS IS UNUSUAL MODULE!');\n$columnA = new Column();\n$columnA->append($moduleA1);\n$columnA->append($moduleA2);\n$columnA->append(new Module('This is a module A3')); //this way its also possible\n$columnA->render();\n$page = new Page();\n$page->append($columnA);\n$page->render();\n?>" }, { "alpha_fraction": 0.42597541213035583, "alphanum_fraction": 0.42811331152915955, "avg_line_length": 28.234375, "blob_id": "b938749b7d1eec8aabf327186f873e11de694357", "content_id": "e63235227e78a3e118ccda9055f996f364be84ba", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 1871, "license_type": "no_license", "max_line_length": 111, "num_lines": 64, "path": "/wp-apm-temp.php", "repo_name": "hollandlive/php5FrameWork", "src_encoding": "UTF-8", "text": "<?php\n\n/* * Plugin Name: Advertisement \n * \n *Plugin URI: http://dutchnews.ru/ \n *Author URI: http://bwawwp.com \n ** Description: This is a working folder of the plugin. \n ** Author: Artem, Rotterdam\n *\n ** Version: 1.0.0 \n ** \n ** License: GPLv2 */\n\t\t\nfunction at_wp_footer($variableA) {\n\t\t\t$variableA = \"I didn't have much time to work on these plugins so I have offered another possible solution\";\n\t\t\techo $variableA;\n\t}\nadd_action ('wp_footer', 'at_wp_footer');\n\n\n\nfunction at_load_scripts() { \n\tif (is_admin()) { \n\t\twp_enqueue_script( \n\t\t'advertisement-plugin-admin', \n\t\tplugins_url('js/admin.js', __FILE__ ), \n\t\tarray('jquery'), \n\t\tADVERTISEMENT_VERSION \n\t\t); \n\t} else { \n\t\twp_enqueue_script( \n\t\t'advertisement-plugin-frontend', \n\t\tplugins_url('js/frontend.js', __FILE__ ), \n\t\tarray('jquery'), \n\t\tADVERTISEMENT_VERSION \n\t\t); \n\t} \n} \nadd_action('init', 'at_load_scripts');\n\n\n\nfunction at_load_styles() { \n\tif (is_admin()) { \n\t\twp_enqueue_style( \n\t\t'advertisement-plugin-admin', \n\t\tplugins_url('css/admin.css', __FILE__ ), \n\t\tarray(), \n\t\tADVERTISEMENT_VERSION, \n\t\t'screen' \n\t\t); \n\t} else { \n\t\twp_enqueue_style( \n\t\t'advertisement-plugin-frontend', \n\t\tplugins_url('css/frontend.css', __FILE__ ), \n\t\tarray(), \n\t\tADVERTISEMENT_VERSION, \n\t\t'screen' \n\t\t); \n\t} \n} \n\nadd_action('init', 'at_load_styles'); \n?>\n" }, { "alpha_fraction": 0.7474256753921509, "alphanum_fraction": 0.7621915936470032, "avg_line_length": 58.17241287231445, "blob_id": "c96483f6f5bfbba98dd7af2e61b11a58a6dcdc32", "content_id": "2cc9421639cc6507a055130276b55d2945a7aa0a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 7861, "license_type": "no_license", "max_line_length": 565, "num_lines": 87, "path": "/indexResponsive.php", "repo_name": "hollandlive/php5FrameWork", "src_encoding": "UTF-8", "text": "<?php\nheader('Content-Type: text/html; charset=utf-8');\n?>\n<!-- html with php files included -->\n\n<!-- head -->\n<!DOCTYPE html>\n<html lang=\"ru\">\n<head>\n\t\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\" />\n\t<title>Веб-ТВ</title>\n\t<link href=\"css/bootstrap.css\" rel=\"stylesheet\" />\n\t<link rel=\"stylesheet\" type=\"text/css\" href=\"style.css\" />\n\t<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n\n\t<!-- [if IE] -->\n\t<script src=\"http://html5shiv.google.com/svn/trunk/html5.js\"></script>\n</head>\n\n<!-- body -->\n<body>\n<div id=\"wrapper\">\n<header>\n<div class=\"header-photo\"> \n <img src=\"images/billboard01.jpg\" class=\"img-responsive\">\n</div>\n</header>\n<nav>\n<ul>\n<li class='active'><a href=\"#\">Intro</a></li>\n<li><a href=\"#\">Intro</a></li>\n<li><a href=\"#\">Intro</a></li>\n<li><a href=\"#\">Intro</a></li>\n</ul>\n</nav>\n<aside class=\"sidebar\">\n<div id=\"social\">\n<tr>\n<td><a href=\"http://www.facebook.com\" target=\"_blank\"><img src=\"images/social-facebook.png\"></a></td>\n<td><a href=\"http://www.facebook.com\" target=\"_blank\"><img src=\"images/social-facebook.png\"></a></td>\n<td><a href=\"http://www.facebook.com\" target=\"_blank\"><img 
src=\"images/social-facebook.png\"></a></td>\n<td><a href=\"http://www.facebook.com\" target=\"_blank\"><img src=\"images/social-facebook.png\"></a></td>\n</tr>\n</div>\n</aside>\n\n\n<section class=\"left-col\">\n<h3>My boxer dog</h3>\n<img src=\"images/mcdonalds-150x140.jpg\"/>\n<p>It is a long established fact that dutchnews.ru i populair</p>\n</section>\n\n\n<aside class=\"sidebar\">\n<h3>Size</h3>\n<img src=\"images/mcdonalds-150x140.jpg\"/>\n<p>Время летит быстро. И не успели отшуметь залпы Новогодних фейерверков в Голландии, и рассеяться пороховой дым от них, как наступила первая рабочая неделя. За время выходных в редакцию поступали сообщения, но одно затронуло наше внимание настолько, что мы решили опубликовать его без комментариев. Стиль, язык и орфография автора сохранены, имена изменены в целях сохранения безопасности данных о частных лицах. А также напомнить о нашем дисклаймере – информация, публикующаяся на сайте не обязательно совпадает с мнением владельца домена и редакционной коллегии.\n\nКак полагается, это письмо, пришедшее на электронную почту нашему корреспонденту из мерии городка, где данный человек собирался заключить брак. Дата подачи заявления была 24 декабря 2014 года. Подача заявления прошла успешно. Дата регистрации была выбрана 5 января 2015 года чере электронный портал мерии, в котором данная дата была свободна. Спустя несколько дней данный человек получил звонок из мэрии с просьбой перенести дату бракосочетания на другое число, так как 5-го января в мэрии “Новогодний завтрак для сотрудников”<p>\n</aside>\t\n<aside class=\"sidebar\">\n<h3>Size</h3>\n<img src=\"images/mcdonalds-150x140.jpg\"/>\n<p>Время летит быстро. И не успели отшуметь залпы Новогодних фейерверков в Голландии, и рассеяться пороховой дым от них, как наступила первая рабочая неделя. За время выходных в редакцию поступали сообщения, но одно затронуло наше внимание настолько, что мы решили опубликовать его без комментариев. Стиль, язык и орфография автора сохранены, имена изменены в целях сохранения безопасности данных о частных лицах. А также напомнить о нашем дисклаймере – информация, публикующаяся на сайте не обязательно совпадает с мнением владельца домена и редакционной коллегии.\n\nКак полагается, это письмо, пришедшее на электронную почту нашему корреспонденту из мерии городка, где данный человек собирался заключить брак. Дата подачи заявления была 24 декабря 2014 года. Подача заявления прошла успешно. Дата регистрации была выбрана 5 января 2015 года чере электронный портал мерии, в котором данная дата была свободна. Спустя несколько дней данный человек получил звонок из мэрии с просьбой перенести дату бракосочетания на другое число, так как 5-го января в мэрии “Новогодний завтрак для сотрудников”<p>\n</aside>\n\n<aside class=\"sidebar\">\n<h3>Play</h3>\n<img src=\"images/mcdonalds-150x140.jpg\"/>\n<p>Время летит быстро. И не успели отшуметь залпы Новогодних фейерверков в Голландии, и рассеяться пороховой дым от них, как наступила первая рабочая неделя. За время выходных в редакцию поступали сообщения, но одно затронуло наше внимание настолько, что мы решили опубликовать его без комментариев. Стиль, язык и орфография автора сохранены, имена изменены в целях сохранения безопасности данных о частных лицах. 
А также напомнить о нашем дисклаймере – информация, публикующаяся на сайте не обязательно совпадает с мнением владельца домена и редакционной коллегии.\n\nКак полагается, это письмо, пришедшее на электронную почту нашему корреспонденту из мерии городка, где данный человек собирался заключить брак. Дата подачи заявления была 24 декабря 2014 года. Подача заявления прошла успешно. Дата регистрации была выбрана 5 января 2015 года чере электронный портал мерии, в котором данная дата была свободна. Спустя несколько дней данный человек получил звонок из мэрии с просьбой перенести дату бракосочетания на другое число, так как 5-го января в мэрии “Новогодний завтрак для сотрудников”<p>\n</aside>\n\n<section class=\"left-col\"></section>\n\n\n\t\n<footer>\n</footer>\n</div>\n</body>\n</html>" }, { "alpha_fraction": 0.613050103187561, "alphanum_fraction": 0.6305007338523865, "avg_line_length": 16.586666107177734, "blob_id": "718bf04c2ad4a598605d1754db74a6e2b842a421", "content_id": "af0c0f164f164f9f51ad07333f3757c526abfed6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 1318, "license_type": "no_license", "max_line_length": 64, "num_lines": 75, "path": "/library/index.class.php", "repo_name": "hollandlive/php5FrameWork", "src_encoding": "UTF-8", "text": "<?php\n/*\n* PHP 5 FrameWork \n* version 1.01, 12.12.2014 @15.55, Rotterdam\nauthor: Artem Aksenov @ [email protected]\nlicense Free\nThis framework based on PHP 5 is for rendering a simple web-page\nLive Example @ www.dutchnews.ru/hub/php5FrameWork\n*/\n\nclass Page\n{\n\tprivate $_columns;\n\t\n\tpublic function __construct() {\n\t\t$this->_columns = array();\n\t\t\t}\n\tpublic function append($column) {\n\t\t$this->_columns[] = $column;\n\t}\n\t\n\tpublic function render() {\n\t\techo '<table border=\"2\" background-color: #00ff00>'.PHP_EOL;\n\t\t\n\t\tforeach($this->_columns as $column) {\n\t\t\t$column->render();\n\t\t}\n\t\techo '</table>'.PHP_EOL;\n\t\t}\n}\n\nclass Column {\n\tprivate $_modules;\n\t\n\tpublic function __construct() {\n\t\t$this->_modules = array();\n\t}\n\npublic function append($module) {\n\t$this->_modules[] = $module;\n}\n\npublic function render() {\n\techo '<tr>'.PHP_EOL;\n\t\n\tforeach($this->_modules as $module) {\n\t\t$module->render();\n\t}\n\t\n\techo '</tr>'.PHP_EOL;\n}\n}\n\nclass Module {\n\tprotected $_content;\n\t\n\tpublic function __construct($content) {\n\t\t$this->_content = $content;\n\t}\n\t\n\tpublic function render() {\n\t\techo '<td>'.$this->_content.'</td>'.PHP_EOL;\n\t}\n}\n\nclass Featured_Module extends Module {\n\tpublic function __construct($content) {\n\t\tparent::__construct($content);\n\t}\n\t\n\tpublic function render() {\n\t\techo '<td><del>'.$this->_content.'</del></td>'.PHP_EOL;\n\t}\n}\n?>" }, { "alpha_fraction": 0.6097561120986938, "alphanum_fraction": 0.6097561120986938, "avg_line_length": 9.5, "blob_id": "e73867c3118651dc834746d1bb2a42ca7cf7d477", "content_id": "cfe1ea096f8082ef6fb17979733c154da70333d8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 41, "license_type": "no_license", "max_line_length": 23, "num_lines": 4, "path": "/library/sidebar.class.php", "repo_name": "hollandlive/php5FrameWork", "src_encoding": "UTF-8", "text": "<?php\n?>\n<aside class=\"sidebar\">\n</aside>" } ]
8
soleneulmer/atmos
https://github.com/soleneulmer/atmos
ad585f63e1c1322427218fcc365057bd620723b5
44c7b2ec1ca45ac7ff587a925ebe0f906bf19e22
e8077074a9936c62bfa053bc8a3a45f5a760189f
refs/heads/master
2021-01-11T01:25:24.593139
2017-04-11T06:02:42
2017-04-11T06:02:42
70,706,940
0
1
null
2016-10-12T14:16:22
2016-10-12T15:04:09
2016-10-12T15:25:56
Python
[ { "alpha_fraction": 0.6870952844619751, "alphanum_fraction": 0.7053654193878174, "avg_line_length": 39.03703689575195, "blob_id": "72467db23b106bcdf930011e4b5c73fc3bde4eab", "content_id": "9819cfc053bf02784fca6c73006c4b4b533b7698", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4324, "license_type": "permissive", "max_line_length": 94, "num_lines": 108, "path": "/indicators_molec.py", "repo_name": "soleneulmer/atmos", "src_encoding": "UTF-8", "text": "# ===================================\n# CALCULATES Ioff and Ires\n# Indicators described in Molecfit II\n#\n# Solene 20.09.2016\n# ===================================\n#\nimport numpy as np\nfrom astropy.io import fits\nimport matplotlib.pyplot as plt\n# from PyAstronomy import pyasl\nfrom scipy.interpolate import interp1d\nfrom scipy.interpolate import InterpolatedUnivariateSpline\nfrom scipy import stats\n# from sklearn.metrics import mean_squared_error\n# from math import sqrt\n# from numpy import linalg as LA\n\n# MOLECFIT\n#\nfile_molecfit = '/home/solene/atmos/For_Solene/1203nm/output/molecfit_crires_solene_tac.fits'\nhdu_molecfit = fits.open(file_molecfit)\ndata_molecfit = hdu_molecfit[1].data\ncols_molecfit = hdu_molecfit[1].columns\n# cols_molecfit.info()\nrawwl_molecfit = data_molecfit.field('mlambda')\nwl_molecfit = rawwl_molecfit*10e2\ntrans_molecfit = data_molecfit.field('mtrans')\ncflux_molecfit = data_molecfit.field('cflux')\n\n# TELFIT\n#\nfile_telfit = '/home/solene/atmos/trans_telfit.txt'\nwl_telfit, trans_telfit, wl_datatelfit, flux_datatelfit = np.loadtxt(\n file_telfit, unpack=True)\n\n# Interpolation\nf_molecfit = interp1d(wl_molecfit, cflux_molecfit, kind='cubic')\nftrans_molecfit = interp1d(wl_molecfit, trans_molecfit, kind='cubic')\n# f_tapas = interp1d(wlcorr_tapas, trans_tapas)\n\n# **1** BINNED DATA\n# 3 delta-lambda = 0.036\n# Mean and std deviation of bins on the telluric CORRECTED spectrum\nfluxmean_bin_means, bin_edges, binnumber = stats.binned_statistic(\n wl_datatelfit, f_molecfit(wl_datatelfit), statistic='mean',\n bins=np.floor((wl_datatelfit[-1]-wl_datatelfit[0])/0.036))\n\nfluxstd_bin_means, _, _ = stats.binned_statistic(\n wl_datatelfit, f_molecfit(wl_datatelfit), statistic=np.std,\n bins=np.floor((wl_datatelfit[-1]-wl_datatelfit[0])/0.036))\n\nbin_width = (bin_edges[1] - bin_edges[0])\nbin_centers = bin_edges[1:] - bin_width/2\n\n# **2** Bins where average TRANSMISSION is > 0.99\nflux_trans_mean_bin_means, _, _ = stats.binned_statistic(\n wl_datatelfit, ftrans_molecfit(wl_datatelfit), statistic='mean',\n bins=np.floor((wl_datatelfit[-1]-wl_datatelfit[0])/0.036))\n# cont_bin_means = flux_trans_mean_bin_means[flux_trans_mean_bin_means > 0.99]\nind_cont = np.where(flux_trans_mean_bin_means > 0.99)\nind_out = np.where((flux_trans_mean_bin_means < 0.95) &\n (flux_trans_mean_bin_means > 0.1))\n\n# plt.plot(bin_centers[ind_cont], flux_trans_mean_bin_means[ind_cont], 'kx')\n\n# **3** Interpolation of the continuum cubic\n# f_cont = interp1d(bin_centers[ind_cont], flux_trans_mean_bin_means[ind_cont], kind='cubic')\n# Extrapolation with constant value spline\nf_cont = InterpolatedUnivariateSpline(\n bin_centers[ind_cont], flux_trans_mean_bin_means[ind_cont], ext=3)\n# bbox=[bin_centers[ind_cont][0], bin_centers[ind_cont][-1]],\n\n\n# **5** Subtract cont to mean flux\n# and Divide offset and std by interpolated continuum mean value\nsys_offset = (fluxmean_bin_means - f_cont(bin_centers)) / f_cont(bin_centers)\nflux_std = fluxstd_bin_means / 
f_cont(bin_centers)\n\n# **6** independant WL = Divide by average absorption\nabsorp_molecfit = 1 - flux_trans_mean_bin_means\nsys_offset_final = sys_offset / absorp_molecfit\nflux_std_final = flux_std / absorp_molecfit\n\nplt.figure(1)\nplt.plot(wl_datatelfit, flux_datatelfit, 'b.-', label='Raw data')\n# plt.hlines(flux_bin_means, bin_edges[:-1],\n# bin_edges[1:], colors='g', lw=5, label='binned statistic of data')\nplt.plot(bin_centers, fluxmean_bin_means, 'rx-', label='Mean binned data')\nplt.plot(bin_centers, fluxstd_bin_means, 'kx-', label='Standard deviation binned data')\nplt.legend()\n\nplt.figure(2)\nplt.plot(wl_datatelfit, flux_datatelfit, 'g.-', label='Data 2nd detector')\nplt.plot(wl_molecfit, trans_molecfit, 'r-', label='Molecfit')\nplt.plot(wl_datatelfit, f_molecfit(wl_datatelfit),\n 'b-', label='Corrected data - Molecfit')\nplt.plot(wl_datatelfit, f_cont(wl_datatelfit),\n 'k-', label='Interpolated Continuum')\nplt.plot(sys_offset_final[ind_out], flux_std_final[ind_out], 'kx')\nplt.plot(flux_trans_mean_bin_means[ind_out],\n sys_offset_final[ind_out], 'kx', label='Ioff vs Transmission')\nplt.plot(flux_trans_mean_bin_means[ind_out],\n flux_std_final[ind_out], 'r.', label='Ires vs Transmission')\nplt.xlabel('Wavelength (nm)')\nplt.ylabel('Transmission')\nplt.legend(loc=3.)\nplt.show()\n" }, { "alpha_fraction": 0.682931661605835, "alphanum_fraction": 0.7009087204933167, "avg_line_length": 38.85826873779297, "blob_id": "d6d9f41327909f5a85577378f8e6a81a66c4bb05", "content_id": "0a4dd34b2fac8ccbb9c4977524b3d7bf01f0a90a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5062, "license_type": "permissive", "max_line_length": 185, "num_lines": 127, "path": "/indicators_molec_xshoo.py", "repo_name": "soleneulmer/atmos", "src_encoding": "UTF-8", "text": "# ===================================\n# CALCULATES Ioff and Ires\n# Indicators described in Molecfit II\n# For X SHOOTER SPECTRA RAQUEL\n# Solene 30.09.2016\n# ===================================\n#\nimport numpy as np\nfrom astropy.io import fits\nimport matplotlib.pyplot as plt\nfrom scipy.interpolate import interp1d\nfrom scipy.interpolate import InterpolatedUnivariateSpline\nfrom scipy import stats\n\n# MOLECFIT\n#\nfile_molecfit = '/home/solene/atmos/raquel_xshooter/output/molecfit_xshoo_raquel2_tac.fits'\nhdu_molecfit = fits.open(file_molecfit)\ndata_molecfit = hdu_molecfit[1].data\ncols_molecfit = hdu_molecfit[1].columns\n# cols_molecfit.info()\nraw_wl_molecfit = data_molecfit.field('lambda')*10e2 # input wl\nraw_flux_molecfit = data_molecfit.field('flux')*10e2 # input flux\nwl_molecfit = data_molecfit.field('mlambda')*10e2 # corrected wl\ntrans_molecfit = data_molecfit.field('mtrans') # transmission flux\ncflux_molecfit = data_molecfit.field('cflux')*10e2 # corrected flux\n# np.sum(np.isnan(cflux_molecfit)) # check for NaN values\n\n# Interpolation\nf_molecfit = interp1d(wl_molecfit, cflux_molecfit) # , kind='cubic')\nftrans_molecfit = interp1d(wl_molecfit, trans_molecfit) # , kind='cubic')\n\n# BIN DATA\n# 3 delta-lambda = 1.07\n# Mean and std deviation of bins on the telluric CORRECTED spectrum\ndelta = 2.142 # 5delta\nfluxmean_bin_means, bin_edges, binnumber = stats.binned_statistic(\n wl_molecfit, cflux_molecfit, statistic='mean',\n bins=np.floor((wl_molecfit[-1]-wl_molecfit[0])/delta))\n\nfluxstd_bin_means, _, _ = stats.binned_statistic(\n wl_molecfit, cflux_molecfit, statistic=np.std,\n bins=np.floor((wl_molecfit[-1]-wl_molecfit[0])/delta))\n\nbin_width = 
(bin_edges[1] - bin_edges[0])\nbin_centers = bin_edges[1:] - bin_width/2\n\n# Bins where average TRANSMISSION is > 0.99\nflux_trans_mean_bin_means, _, _ = stats.binned_statistic(\n wl_molecfit, trans_molecfit, statistic='mean',\n bins=np.floor((wl_molecfit[-1]-wl_molecfit[0])/delta))\n\nind_cont = np.where(flux_trans_mean_bin_means > 0.99)\nind_out = np.where((flux_trans_mean_bin_means < 0.95) &\n (flux_trans_mean_bin_means > 0.1))\n\n# plt.plot(bin_centers[ind_cont], flux_trans_mean_bin_means[ind_cont], 'kx')\n\n# INTERPOLATION CONTINUUM\n# Interpolation polynomial\nf_cont = interp1d(bin_centers[ind_cont], fluxmean_bin_means[ind_cont], kind='linear', bounds_error=False, fill_value=(fluxmean_bin_means[ind_cont][0], fluxmean_bin_means[ind_cont][-1]))\n\n# Extrapolation with constant value spline\n# f_cont = InterpolatedUnivariateSpline(\n# bin_centers[ind_cont], fluxmean_bin_means[ind_cont], ext=3)\n\n# Subtract cont to mean flux\n# and Divide offset and std by interpolated continuum mean value\nsys_offset = (fluxmean_bin_means - f_cont(bin_centers)) / f_cont(bin_centers)\nflux_std = fluxstd_bin_means / f_cont(bin_centers)\n\n# Independant WL = Divide by average absorption\nabsorp_molecfit = 1 - flux_trans_mean_bin_means\nsys_offset_final = sys_offset / absorp_molecfit\nflux_std_final = flux_std / absorp_molecfit\n\n\n# PLOTTING\n# Figure 2 in Molecfit II Mean+Std\nplt.figure(1)\nplt.plot(raw_wl_molecfit, raw_flux_molecfit, 'g.-', label='Raw data')\n# plt.hlines(flux_bin_means, bin_edges[:-1],\n# bin_edges[1:], colors='g', lw=5, label='binned statistic of data')\nplt.plot(bin_centers, fluxmean_bin_means, 'rx-', label='Mean binned data')\nplt.plot(bin_centers, fluxstd_bin_means, 'kx-', label='Standard deviation binned data')\nplt.legend()\n\n# Indicators Ioff and Ires\nplt.figure(2)\nplt.plot(raw_wl_molecfit, raw_flux_molecfit, 'g.-', label='Raw data')\nplt.plot(wl_molecfit, trans_molecfit, 'r-', label='Molecfit')\nplt.plot(wl_datatelfit, f_molecfit(wl_datatelfit),\n 'b-', label='Corrected data - Molecfit')\nplt.plot(wl_datatelfit, f_cont(wl_datatelfit),\n 'k-', label='Interpolated Continuum')\nplt.plot(sys_offset_final[ind_out], flux_std_final[ind_out], 'kx')\nplt.plot(flux_trans_mean_bin_means[ind_out],\n sys_offset_final[ind_out], 'kx', label='Ioff')\nplt.plot(flux_trans_mean_bin_means[ind_out],\n flux_std_final[ind_out], 'r.', label='Ires')\n\n# Selected continuum points\nplt.figure(3)\nplt.plot(wl_molecfit, cflux_molecfit, 'k-', label='Corrected data')\nplt.plot(bin_centers[ind_cont], fluxmean_bin_means[ind_cont], 'ro', label='Continuum points')\nplt.plot(bin_centers, fluxmean_bin_means, 'b', label='Mean data')\nplt.plot(bin_centers[ind_cont], f_cont(bin_centers[ind_cont]), 'b-')\n\n# Figure 3 in Molecfit II shared axis\nf = plt.figure()\nplt.subplots_adjust(hspace=0.001)\nax1 = plt.subplot(211)\nax1.plot(flux_trans_mean_bin_means[ind_out],sys_offset_final[ind_out], 'k.', label='Ioff')\nplt.ylim(-1, 1)\nplt.ylabel('Ioff')\nax2 = plt.subplot(212, sharex=ax1)\nax2.plot(flux_trans_mean_bin_means[ind_out],flux_std_final[ind_out], 'r.', label='Ires')\nplt.ylabel('Ires')\nplt.ylim(-1, 2)\nplt.xlabel('Transmission')\nxticklabels = ax1.get_xticklabels() + ax2.get_xticklabels()\nplt.setp(xticklabels, visible=True)\n\nplt.xlabel('Wavelength (nm)')\nplt.ylabel('Transmission')\nplt.legend(loc=3.)\nplt.show()\n" }, { "alpha_fraction": 0.6953364014625549, "alphanum_fraction": 0.7138761281967163, "avg_line_length": 36.91304397583008, "blob_id": "9177262a6f27b041bb5998c7722464c0f65e3da3", "content_id": 
"658e2aa213f28b045cfd62b7909bfc71b26d78cb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5232, "license_type": "permissive", "max_line_length": 104, "num_lines": 138, "path": "/compareModels.py", "repo_name": "soleneulmer/atmos", "src_encoding": "UTF-8", "text": "# ===================================\n# COMPARE Tapas, Telfit, Molecfit\n# plotting the transmission spectra\n#\n# Solene 14.06.2016\n# ===================================\n#\nimport numpy as np\nfrom astropy.io import fits\nimport matplotlib.pyplot as plt\nfrom PyAstronomy import pyasl\nfrom scipy.interpolate import interp1d\nfrom sklearn.metrics import mean_squared_error\nfrom math import sqrt\nfrom numpy import linalg as LA\n#\n#\n# TAPAS\n# wl and flux classed decreasing, reverse array: array[::-1]\nfile_tapas = '/home/solene/atmos/tapas/crires1203/tapas_000001.ipac'\nrawwl_tapas, rawtrans_tapas = np.loadtxt(file_tapas, skiprows=38, unpack=True)\nwl_tapas = rawwl_tapas[::-1]\ntrans_tapas = rawtrans_tapas[::-1]\n\n# MOLECFIT\n#\nfile_molecfit = '/home/solene/atmos/For_Solene/1203nm/output/molecfit_crires_solene_tac.fits'\nhdu_molecfit = fits.open(file_molecfit)\ndata_molecfit = hdu_molecfit[1].data\ncols_molecfit = hdu_molecfit[1].columns\n# cols_molecfit.info()\nrawwl_molecfit = data_molecfit.field('mlambda')\nwl_molecfit = rawwl_molecfit*10e2\ntrans_molecfit = data_molecfit.field('mtrans')\ncflux_molecfit = data_molecfit.field('cflux')\n\n# TELFIT\n#\nfile_telfit = '/home/solene/atmos/trans_telfit.txt'\nwl_telfit, trans_telfit, wl_datatelfit, flux_datatelfit = np.loadtxt(\n file_telfit, unpack=True)\n\n\n# Cross-correlation\n# from PyAstronomy example\n#\n# TAPAS is the \"template\" shifted to match Molecfit\nrv, cc = pyasl.crosscorrRV(\n wl_molecfit, trans_molecfit, wl_tapas, trans_tapas,\n rvmin=-60., rvmax=60.0, drv=0.1, mode='doppler', skipedge=50)\n\nmaxind = np.argmax(cc)\nprint(\"Cross-correlation function is maximized at dRV = \", rv[maxind], \" km/s\")\n\n\n# Doppler shift TAPAS\n#\nwlcorr_tapas = wl_tapas * (1. 
+ rv[maxind]/299792.)\n# transcorr_tapas, wlcorr_tapas = pyasl.dopplerShift(\n# wl_tapas[::-1], trans_tapas[::-1], rv[maxind],\n# edgeHandling=None, fillValue=None) # Fancy way\n\n\n# RMS between two spectra TAPAS, MOLECFIT\n# do the same with the data and try to better fit the continuum with molecfit\n# Selecting 2nd detector only\n# USELESS\nwlstart = wl_datatelfit[0]\nwlend = wl_datatelfit[-1]\nind_molecfit = np.where((wl_molecfit > wlstart) & (wl_molecfit < wlend))\nwl_molecfit2 = wl_molecfit[ind_molecfit]\ntrans_molecfit2 = trans_molecfit[ind_molecfit]\n\nind_tapas = np.where((wl_tapas > wlstart) & (wl_tapas < wlend))\nwl_tapas2 = wl_tapas[ind_tapas]\ntrans_tapas2 = trans_tapas[ind_tapas]\n\n# Interpolation\n# f_molecfit = interp1d(wl_molecfit, trans_molecfit, kind='cubic') # takes forever...\n# wlcorr_tapasnew = wlcorr_tapas[500:-500] # raw adjustment of the wl limits\n# plt.plot(wl_molecfit, trans_molecfit, 'o', wlcorr_tapasnew, f_molecfit(wlcorr_tapasnew), '.')\n\nf_molecfit = interp1d(wl_molecfit, trans_molecfit)# , kind='cubic') # takes forever...\nf_tapas = interp1d(wlcorr_tapas, trans_tapas)\n\n# Euclidean distance at each point\nstack_molecfit = np.stack((flux_datatelfit, f_molecfit(wl_datatelfit)), axis=-1)\nstack_tapas = np.stack((flux_datatelfit, f_tapas(wl_datatelfit)), axis=-1)\nnorm_molecfit = LA.norm(stack_molecfit, axis=1)\nnorm_tapas = LA.norm(stack_tapas, axis=1)\n# trans_stack = np.stack((trans_tapas[500:-500], f_molecfit(wlcorr_tapasnew)), axis=-1)\n# norm_trans = LA.norm(trans_stack, axis=1)\nplt.plot(wl_datatelfit, norm_tapas, 'r.') # see that the continuum is offset 1.4\nplt.plot(wl_datatelfit, norm_molecfit, 'k.')\n# RMS\nerr_molec = flux_datatelfit - f_molecfit(wl_datatelfit)\nerr_tapas = flux_datatelfit, f_tapas(wl_datatelfit)\n\nrms_molec = sqrt(mean_squared_error(flux_datatelfit, f_molecfit(wl_datatelfit)))\nrms_tapas = sqrt(mean_squared_error(flux_datatelfit, f_tapas(wl_datatelfit)))\n\n\n# Plotting\n#\nplt.figure(1)\nplt.subplot(211)\nplt.plot(wl_datatelfit, flux_datatelfit, 'g.-', label='Data 2nd detector')\nplt.plot(wl_molecfit, trans_molecfit, 'r-', label='Molecfit')\nplt.plot(wl_tapas, trans_tapas, 'b-', label='Tapas')\nplt.title('Comparison atmospheric transmission \\n CRIRES data')\nplt.xlabel('Wavelength (nm)')\nplt.ylabel('Transmission')\nplt.legend(loc=3.)\nplt.subplot(212)\n# plt.plot(wl_tapas, trans_tapas, 'b-', label='Tapas')\nplt.plot(wl_datatelfit, flux_datatelfit, 'g.-', label='Data 2nd detector')\nplt.plot(wl_molecfit, trans_molecfit, 'r-', label='Molecfit')\nplt.plot(wlcorr_tapas, trans_tapas, 'b--', label='Tapas corrected')\n\n# plot 2nd detector only with WL from the data\nplt.plot(wl_datatelfit, flux_datatelfit, 'g.-', label='Data 2nd detector')\nplt.plot(wl_datatelfit, f_molecfit(wl_datatelfit), 'r-', label='Molecfit')\nplt.plot(wl_datatelfit, f_tapas(wl_datatelfit), 'b--', label='Tapas corrected')\n# plt.plot(wl_telfit, trans_telfit, 'r-', label='Telfit')\n\nplt.plot(wl_datatelfit, (flux_datatelfit - f_tapas(wl_datatelfit)), 'b.', label='Tapas residuals')\nplt.plot(wl_datatelfit, (flux_datatelfit - f_molecfit(wl_datatelfit)), 'r.', label='Molecfit residuals')\n\nplt.plot(wl_datatelfit, flux_datatelfit, 'g.-', label='Data 2nd detector')\nplt.plot(wl_molecfit, trans_molecfit, 'r-', label='Molecfit')\nplt.plot(wl_molecfit, cflux_molecfit, 'b-', label='Corrected data - Molecfit')\n\nplt.xlabel('Wavelength (nm)')\nplt.ylabel('Transmission')\n# plt.plot(model.x, model.y, 'k-', label='Gaussian fit')\n# $\\mu=%.2f, \\sigma=%.2f$' 
%(wavestart, waveend)\nplt.legend(loc=3.)\nplt.show()\n" }, { "alpha_fraction": 0.7807229161262512, "alphanum_fraction": 0.7831325531005859, "avg_line_length": 45.11111068725586, "blob_id": "5d31b2c8de216e923bfaaddeb7f401f8766c8f6f", "content_id": "b151f5455aeb2e5fc4b250768de172de23c1e8ad", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 415, "license_type": "permissive", "max_line_length": 75, "num_lines": 9, "path": "/README.md", "repo_name": "soleneulmer/atmos", "src_encoding": "UTF-8", "text": "# atmos\n#### CARMENES\n*carmenes2molecfit.py:* Creates a single spectrum of the CARMENES orders,\nby merging the overlapping orders.\n- [x] Identifies and merges overlapping regions\n- [x] Corrects wavelengths shift and artefacts in the data\n- [x] Provides rough estimates of the continuum mean and standard deviation\n- [x] Creates exclusion and inclusion ranges for Molecfit\n- [x] Creates input FITS file for Molecfit\n" }, { "alpha_fraction": 0.5678701400756836, "alphanum_fraction": 0.5892539024353027, "avg_line_length": 37.07374572753906, "blob_id": "536316ad63de8f8c361188dfcc3bca309d3c8530", "content_id": "9a51405e2ff48c934c736b1055ff150c15bcf710", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 25814, "license_type": "permissive", "max_line_length": 132, "num_lines": 678, "path": "/carmenes/carmenes2molecfit.py", "repo_name": "soleneulmer/atmos", "src_encoding": "UTF-8", "text": "# CARMENES 2 MOLECFIT\n#\n# Format CARMENES Data in a single spectrum\n# and create the parameter file for Molecfit\n\n# 18 Nov 2016\n# ============================================\n\n# import argparse\n# from gooey import Gooey, GooeyParser\nfrom astropy.io import fits\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport numpy.ma as ma\nimport csv\nfrom operator import itemgetter\nfrom itertools import groupby\nfrom scipy.interpolate import interp1d\nfrom PyAstronomy import pyasl\n# from matplotlib import cm\nfrom astropy.constants import c\nimport time\n\n\ndef corrected_overlap(wavelength_masked, flux_masked, good_overlap):\n \"\"\"\n Creates the new 1D corrected spectrum\n ----------------------------------\n good overlap: contains on each line\n the wl before which to insert overlap\n corrected wl array\n corrected flux array\n \"\"\"\n waves = wavelength_masked.flatten()\n fluxes = flux_masked.flatten()\n # Wavelength without the overlapping regions\n waves_corrected = waves.compressed()\n fluxes_corrected = fluxes.compressed()\n\n for i in range(len(good_overlap)):\n # Indice where the overlappin region ends\n idx = np.where(waves_corrected == good_overlap[i][0])[0]\n to_del = np.where(waves_corrected[:idx] > good_overlap[i][1][0])[0]\n #print to_del\n waves_corrected = np.delete(waves_corrected, to_del)\n fluxes_corrected = np.delete(fluxes_corrected, to_del)\n # Insert the 'mean' spectrum at this indice\n idx = np.where(waves_corrected == good_overlap[i][0])[0]\n waves_corrected = np.insert(waves_corrected, idx, good_overlap[i][1])\n fluxes_corrected = np.insert(fluxes_corrected, idx, good_overlap[i][2])\n\n return waves_corrected, fluxes_corrected\n\n\ndef two_spectra_overlap(wavelength_masked, flux_masked, overlap, nb_overlaps):\n \"\"\"\n Extracts the two spectra which are in the overlapping region.\n Selects in the full spectrum the part of each order\n which overlap with the following.\n\n INPUTS: WAVELENGTH_MASKED, FLUX_MASKED = masked arrays of the CARMENES 
spectrum\n OVERLAP = number of the overlapping region (usually btw 0 and 10)\n NB_OVERLAPS = total number of overlapping regions\n\n OUTPUTS: WL_LEFT, FLUX_LEFT = array of wavelength and flux, order N\n WL_RIGHT, FLUX_RIGHT = array of wavelength and flux, order N+1\n \"\"\"\n i = overlap\n # First overlap\n if i == 0:\n wl_left = wavelength_masked[i][wavelength_masked[i].mask].data\n flux_left = flux_masked[i][wavelength_masked[i].mask].data\n\n wl_right_full = wavelength_masked[i+1][wavelength_masked[i+1].mask].data\n flux_right_full = flux_masked[i+1][wavelength_masked[i+1].mask].data\n # right order has two overlapping regions, select the first_overlap\n # can be better take the first half of the list should be enough\n mid_order = wl_right_full[0] + (wl_right_full[-1]-wl_right_full[0])/2.\n idx_wl = np.where(wl_right_full < mid_order)\n\n wl_right = wl_right_full[idx_wl]\n flux_right = flux_right_full[idx_wl]\n\n # Last overlap\n elif i == nb_overlaps-1:\n # can be delete\n wl_left_full = wavelength_masked[i][wavelength_masked[i].mask].data\n flux_left_full = flux_masked[i][wavelength_masked[i].mask].data\n mid_order = wl_left_full[0] + (wl_left_full[-1]-wl_left_full[0])/2.\n idx_wl = np.where(wl_left_full > mid_order)\n wl_left = wl_left_full[idx_wl]\n flux_left = flux_left_full[idx_wl]\n\n wl_right = wavelength_masked[i+1][wavelength_masked[i+1].mask].data\n flux_right = flux_masked[i+1][wavelength_masked[i+1].mask].data\n\n # Middle overlaps\n else:\n # LEFT\n wl_left_full = wavelength_masked[i][wavelength_masked[i].mask].data\n flux_left_full = flux_masked[i][wavelength_masked[i].mask].data\n mid_order = wl_left_full[0] + (wl_left_full[-1]-wl_left_full[0])/2.\n idx_wl = np.where(wl_left_full > mid_order)\n wl_left = wl_left_full[idx_wl]\n flux_left = flux_left_full[idx_wl]\n # RIGHT\n wl_right_full = wavelength_masked[i+1][wavelength_masked[i+1].mask].data\n flux_right_full = flux_masked[i+1][wavelength_masked[i+1].mask].data\n mid_order = wl_right_full[0] + (wl_right_full[-1]-wl_right_full[0])/2.\n idx_wl = np.where(wl_right_full < mid_order)\n wl_right = wl_right_full[idx_wl]\n flux_right = flux_right_full[idx_wl]\n\n return wl_left, flux_left, wl_right, flux_right\n\n\ndef crosscorrelation(w, f, tw, tf, rvmin=-4., rvmax=4.0, drv=0.1):\n drvs = np.arange(rvmin, rvmax, drv)\n cc = np.zeros(len(drvs))\n for i, rv in enumerate(drvs):\n fi = interp1d(tw+rv, tf)\n cc[i] = np.sum(f * fi(w))\n\n return drvs, cc\n\n\ndef continuous_nb_in_list(data):\n \"\"\"\n Finds in a list the sequences of consecutive numbers\n \"\"\"\n\n continuous = []\n # data = [2, 3, 4, 5, 12, 13, 14, 15, 16, 17]\n # data = np.where(waves.mask)[0]\n # Find continuous numbers in data\n # for k, g in groupby(data, keyfunc):\n # g = group : groups each value in data with its index\n # lambda : small function which substract each value by its index\n # k = key : result of the lambda function\n for k, g in groupby(enumerate(data), lambda (i, x): i-x):\n group = map(itemgetter(1), g)\n continuous.append(group)\n\n return continuous\n\n\ndef rv_shift(wl_0, flux_0, wl_1, flux_1):\n \"\"\"\n Find the rv shift needed to flux1 to match flux0\n INPUTS: WL_0, FLUX_0 = wavelength and flux of the spectrum 0\n WL_1, FLUX_1 = wavelength and flux of the spectrum 1\n template shifted to match spectrum 0\n OUTPUTS: RV = the rv shift\n WL_1_CORR = wavelength 1 shifted to match spectrum 0\n \"\"\"\n try:\n # Cross correlation\n rv, cc = pyasl.crosscorrRV(\n wl_0, flux_0, wl_1, flux_1,\n rvmin=-4., rvmax=4.0, drv=0.01, mode='doppler', 
skipedge=20.)\n maxind = np.argmax(cc)\n print \"Cross-correlation function is maximized at dRV = \", rv[maxind], \" km/s\"\n\n # Doppler shift the wavelengths\n wl_1_corr = wl_1 * (1. + (rv[maxind])/c.to('km/s').value)\n\n return rv[maxind], wl_1_corr\n\n except pyasl.PE.PyAValError:\n print \"Failed cross correlation - No RV shift\"\n\n return 0, wl_1\n\n\ndef find_continuum(wl, flux):\n #\n # Defines continuum points as the ones above the median\n # Calculate the threshold, standard deviation and mean\n #\n \"\"\"\n # IDEA: make a second iteration\n # of the continuum definition so it will be better\n # def define_cont(flux_overlapping, value):\n # return continuum, threshold\n \"\"\"\n\n threshold = 0.\n mean_cont = 0.\n std_cont = 0.\n\n abv_median = np.where(flux > np.median(flux))[0]\n if any([abv_median.size == 0, wl.size == 0]):\n print 'No continuum definition, empty array'\n pass\n elif wl[-1]-wl[0] < 3.:\n print 'No continuum definition, overlap < 3A'\n pass\n else:\n possible_cont = continuous_nb_in_list(abv_median.tolist())\n idx_cont = max(possible_cont, key=len)\n # Stats on the continuum\n mean_cont = np.mean(flux[idx_cont])\n std_cont = np.std(flux[idx_cont])\n threshold = mean_cont - np.sqrt(std_cont)\n # plt.plot(wl_i[idx_cont], flux_i[idx_cont], 'yo')\n\n #print 'Threshold at: ', threshold\n\n return threshold, mean_cont, std_cont\n\n\ndef suppress_artefact(wl, flux, jump=0.15, interact=False):\n \"\"\"\n -- Use with parsimony --\n Deletes artefacts (sharp peaks) in the spectrum\n if the difference in flux btw two consecutive points is larger than JUMP\n\n INPUTS: WL, FLUX = arrays wavelength and flux of the spectrum\n JUMP = threshold of the difference btw two consecutive points\n INTERACT = bool, False by default\n if True plot is shown and waits for confirmation by user\n\n OUTPUTS: NEW_WL, NEW_FLUX = modified wavelength and flux arrays\n \"\"\"\n index = []\n # Whats the difference with ediff1 in wrange_exclude ???\n diff = np.diff(flux)\n artefact = [(wl[i], i) for i in range(len(diff)) if abs(diff[i]) > jump]\n for i in range(len(artefact)):\n print '- WL artefact: ', artefact[i][0]\n if interact:\n plt.figure(i+10)\n plt.plot(wl, flux, 'mo-')\n plt.title('Is it an artefact?')\n plt.axis([artefact[i][0]-5, artefact[i][0]+5, 0, 1])\n yes = raw_input(\" >> If it's an artefact, type yes: \")\n if str(yes) == 'yes':\n #print 'Deleting 5 points ... !'\n index.append((artefact[i][1]-2, artefact[i][1]-1, artefact[i][1], artefact[i][1]+1, artefact[i][1]+2))\n else:\n #print 'Deleting 5 points ... 
!'\n index.append((artefact[i][1]-2, artefact[i][1]-1, artefact[i][1], artefact[i][1]+1, artefact[i][1]+2))\n\n new_wl = np.delete(wl, index)\n new_flux = np.delete(flux, index)\n\n return new_wl, new_flux\n\n\ndef mean_spectrum(wl_right, flux_right, wl_left, flux_left, mean_cont, std_cont):\n \"\"\"\n Calculates the mean spectrum of two overlapping spectra\n and chooses the spectrum which does not decrease in brightness at the edges\n INPUTS: WL_0, FLUX_0 = wavelength and flux of the spectrum 0\n WL_1, FLUX_1 = wavelength and flux of the spectrum 1\n THRESHOLD = value below the continuum and its noise\n\n OUTPUTS: NEW_FLUX = new spectrum on the overlapping region\n \"\"\"\n # Interpolate spectra left and right\n fct_left = interp1d(wl_left, flux_left)\n fct_right = interp1d(wl_right, flux_right)\n\n # Define overlapping wavelength range\n if wl_right[0] > wl_left[0]:\n wl_start = wl_right[0]\n else:\n wl_start = wl_left[0]\n\n if wl_right[-1] > wl_left[-1]:\n wl_end = wl_left[-1]\n else:\n wl_end = wl_right[-1]\n\n new_wl = np.arange(wl_start, wl_end, 0.01)\n\n # Define the commun flux\n flux_start = []\n flux_end = []\n flux_middle = []\n\n idx_start = 0\n idx_end = len(new_wl)-1\n\n for idx, wl in enumerate(new_wl):\n if std_cont == 0.:\n break\n elif abs(fct_right(wl)-fct_left(wl)) > std_cont:\n flux_start.append(fct_left(wl))\n else:\n idx_start = idx\n break\n\n for idx, wl in reversed(list(enumerate(new_wl))):\n if std_cont == 0.:\n break\n elif abs(fct_right(wl) - fct_left(wl)) > std_cont:\n flux_end.insert(0, fct_right(wl))\n else:\n idx_end = idx\n break\n\n ## 300ms long...\n ##flux_middle2 = [np.mean([fct_left(wl), fct_right(wl)]) for idx, wl in enumerate(new_wl[idx_start:idx_end+1], start=idx_start)]\n for idx, wl in enumerate(new_wl[idx_start:idx_end+1], start=idx_start):\n mean_flux = np.mean([fct_left(wl), fct_right(wl)])\n flux_middle.append(mean_flux)\n\n new_flux = np.concatenate([flux_start, flux_middle, flux_end])\n\n for i, wl in reversed(list(enumerate(wl_left))):\n if wl < new_wl[0]:\n new_wl = np.insert(new_wl, 0, wl)\n new_flux = np.insert(new_flux, 0, flux_left[i])\n for i, wl in enumerate(wl_right):\n if wl > new_wl[-1]:\n new_wl = np.append(new_wl, wl)\n new_flux = np.append(new_flux, flux_right[i])\n\n return new_wl, new_flux\n\n\nclass Spectrum(object):\n def __init__(self, name, header, wavelength, flux, non_corr_flux, cont):\n self.name = name\n self.header = header\n self.wavelength = wavelength\n self.flux = flux\n self.non_corr_flux = non_corr_flux\n self.cont = cont\n\n @classmethod\n def from_file(cls, filename):\n \"\"\"\n # Create an object of the Spectrum class from a CARMENES FITS file\n # INPUTS: CLS = instance created when Spectrum class is called,\n # before init function\n # FILENAME = name of the CARMENES FITS file\n # OUTPUT: SPECTRUM = object of the Spectrum class\n \"\"\"\n print cls\n name = filename.strip()\n hdu = fits.open(filename.strip())\n header = hdu[0].header\n wavelength = hdu[4].data\n flux = hdu[1].data\n non_corr_flux = hdu[3].data\n cont = hdu[2].data\n spectrum = cls(name, header, wavelength, flux, non_corr_flux, cont)\n return spectrum\n\n def mask_overlap_rgn(self):\n \"\"\"\n # Create a mask for the overlapping orders of CARMENES\n # Apply this mask to the wavelengths and fluxes\n # INPUTS: SELF\n # OUTPUTS: WAVES = masked array of the wl\n # FLUXES = masked array of the flux\n # COUNT = number of overlapping orders\n # MASK = mask (redundant bc included in the masked arrays)\n \"\"\"\n count = 0\n for i in 
range(self.wavelength.shape[0]):\n # Identify the overlapping regions on the wavelengths\n # First wavelength array\n if i == 0:\n wave_start = ma.masked_where(\n self.wavelength[i] > self.wavelength[i+1][0],\n self.wavelength[i], copy=True)\n waves = wave_start\n #print 'First wave', waves.shape\n if np.sum(wave_start.mask) != 0:\n count += 1\n #print 'How many masked elements?', np.sum(wave_start.mask)\n\n # Last wavelength array\n elif i == self.wavelength.shape[0]-1:\n wave_end = ma.masked_where(\n self.wavelength[i] < self.wavelength[i-1][-1],\n self.wavelength[i], copy=True)\n waves = ma.vstack((waves, wave_end))\n #print 'Last wave', waves.shape\n if np.sum(wave_end.mask) != 0:\n count += 1\n #print 'How many masked elements?', np.sum(wave_end.mask)\n # All the other ones\n else:\n wave_mid = ma.masked_where(\n (self.wavelength[i] > self.wavelength[i+1][0]) |\n (self.wavelength[i] < self.wavelength[i-1][-1]),\n self.wavelength[i], copy=True)\n waves = ma.vstack((waves, wave_mid))\n # print 'Middle', waves.shape\n if np.sum(wave_mid.mask) != 0:\n count += 1\n # print 'How many masked elements?', np.sum(wave_mid.mask)\n\n # count -1 bc it counts the orders instead of the overlaps\n print 'Number of overlapping orders: ', count-1\n # Apply the mask to the flux\n fluxes = ma.array(self.flux, mask=waves.mask)\n\n return waves, fluxes, count-1, waves.mask\n\n def treat_overlap(self, wavelength_masked, flux_masked, nb_overlaps):\n \"\"\"\n Create an unique spectrum for each overlapping regions\n ---------------------------------------------------------------------\n Steps:\n - Delete artefact found in the data\n - Find a Doppler shift in the first two overlapping regions\n (after the shift seems too small to be found,\n I guess resampling is needed... YET TO BE DONE !)\n - Define a very basic continuum and a threshold under which\n the decrease in brightness at the edges of the orders are avoided\n - Take the mean spectrum in the overlapping region\n - Put together the corrected overlapping regions\n and the rest of the spectrum\n ---------------------------------------------------------------------\n INPUTS: WAVELENGTH_MASKED, FLUX_MASKED = masked arrays of the CARMENES spectrum\n NB_OVERLAPS = total number of overlapping regions\n\n OUTPUTS: WAVES_CORRECT, FLUXES_CORRECT = corrected CARMENES spectrum\n \"\"\"\n idx_true = np.where(wavelength_masked.flatten().mask == True)\n group_idx = continuous_nb_in_list(idx_true[0])\n good_overlap = []\n plt.figure(1)\n\n for i in range(nb_overlaps):\n print '\\n-- Overlap nb:', i+1, '--'\n # WL after which the corrected flux should be inserted\n wl_insert = spectrum10.wavelength.flatten()[group_idx[i][-1]+1]\n print 'WL insert: ', wl_insert\n\n # Find the two orders which overlap\n wl_left, flux_left, wl_right, flux_right = two_spectra_overlap(wavelength_masked, flux_masked, i, nb_overlaps)\n\n # Suppress artefact in the overlapping region\n wl_left, flux_left = suppress_artefact(wl_left, flux_left, interact=False)\n wl_right, flux_right = suppress_artefact(wl_right, flux_right, interact=False)\n\n # Plotting\n # plt.plot(wl_right, flux_right, 'k.-', label='Right')\n # plt.plot(wl_left, flux_left, 'b.-', label='Left')\n\n # RV shift\n if i == 0 or i == 1:\n # RV shift in the first two overlapping regions\n # !! 
Shift seems too small in the following orders, need to be IMPROVED !!\n shift, wl_left = rv_shift(wl_right, flux_right, wl_left, flux_left)\n plt.plot(wl_left, flux_left, 'go-', label='Left RV shift')\n\n # Find the continuum mean and std in the overlapping region\n threshold, mean, std = find_continuum(wl_left, flux_left)\n # Computes the 'mean' spectrum in the overlapping region\n new_wl, new_flux = mean_spectrum(wl_right, flux_right, wl_left, flux_left, mean, std)\n\n # Save the correct overlap and the position (wl_insert)\n good_overlap.append([wl_insert, new_wl, new_flux])\n\n # Plotting\n #plt.legend()\n\n wl_correct, flux_correct = corrected_overlap(wavelength_masked, flux_masked, good_overlap)\n # plt.plot(wl_correct, flux_correct, 'm.--', label='Corrected spectrum')\n # plt.legend()\n\n return wl_correct, flux_correct\n\n def wrange_exclude(self, waves, fluxes):\n \"\"\"\n # Create the exclusion range in wavelength for Molecfit\n # INPUTS :\n # OUTPUTS: None\n # Write wavelength_exclude.dat file used by Molecfit\n \"\"\"\n if any([np.ma.is_masked(waves), np.ma.is_masked(fluxes)]):\n waves = self.wavelength.flatten()\n # Wavelength without the overlapping regions\n waves_clean = waves.compressed()\n else:\n waves_clean = waves\n\n # Differences btw two consecutive elmts\n diff = np.ediff1d(waves_clean)\n # Mask is True when the WL are far to each other\n mask = [diff[i] > 10.*np.mean(diff) for i in range(diff.shape[0])]\n diff_masked = ma.masked_array(diff, mask=mask)\n\n # Wavelength ranges to exclude\n # [expression for i in list if condition]\n # WL should already be in microns, Molecfit doesnt convert the masks\n wranges = [(waves_clean[i]*0.0001, waves_clean[i+1]*0.0001)\n for i in range(diff_masked.shape[0])\n if diff_masked.mask[i]]\n\n print 'Writing wrange_exclude.dat ...'\n with open('wrange_exclude.dat', 'w') as f:\n writer = csv.writer(f, delimiter=' ')\n writer.writerows(wranges)\n\n # for i in range(len(wranges)):\n # plt.plot((wranges[i][0]*10000., wranges[i][0]*10000.),\n # (-0.5, 1.5), 'k--')\n # plt.plot((wranges[i][1]*10000., wranges[i][1]*10000.),\n # (-0.5, 1.5), 'b--')\n\n # plt.plot(spectrum.wavelength.flatten(), spectrum.flux.flatten(), 'g-')\n # plt.xlabel('Wavelength in microns')\n # plt.ylabel('Flux')\n # plt.title('CARMENES spectrum')\n return None\n\n def wrange_include(self, h2o=True, o2=False, co2=True, ch4=False):\n \"\"\"\n # Create the inclusion range in wavelength for Molecfit\n # INPUTS :\n # OUTPUTS: None\n # Write wavelength_include.dat file used by Molecfit\n \"\"\"\n\n wranges = []\n wrange_h2o = [1.10, 1.12]\n # wrange_h2o = [1.18, 1.20]\n wrange2_h2o = [1.34, 1.36]\n wrange_o2 = [1.26, 1.29]\n wrange_co2 = [1.56, 1.64]\n wrange_ch4 = [1.64, 1.72]\n\n if h2o:\n wranges.append(wrange_h2o)\n wranges.append(wrange2_h2o)\n if o2:\n wranges.append(wrange_o2)\n\n if co2:\n wranges.append(wrange_co2)\n\n if ch4:\n wranges.append(wrange_ch4)\n\n print 'Writing wrange_include.dat ...'\n with open('wrange_include.dat', 'w') as f:\n writer = csv.writer(f, delimiter=' ')\n writer.writerows(wranges)\n\n return None\n\n def input_molecfit(self, wl=[], flux=[]):\n # Creates an input file readable for Molecfit\n if any([wl.size == 0, flux.size == 0]):\n # Bin Table with data\n print 'I am using the original FITS file'\n tb_hdu = fits.BinTableHDU.from_columns(\n [fits.Column(name='WAVE', format='1D', array=self.wavelength.flatten()),\n fits.Column(name='SPEC', format='1D', array=self.flux.flatten()),\n fits.Column(name='CONT', format='1D', 
array=self.cont.flatten()),\n fits.Column(name='SIG', format='1D', array=self.non_corr_flux.flatten())])\n else:\n print 'I am using wl and flux corrected'\n tb_hdu = fits.BinTableHDU.from_columns(\n [fits.Column(name='WAVE', format='1D', array=wl),\n fits.Column(name='SPEC', format='1D', array=flux),\n fits.Column(name='CONT', format='1D', array=self.cont.flatten()),\n fits.Column(name='SIG', format='1D', array=self.non_corr_flux.flatten())])\n\n # Header\n head_hdu = fits.PrimaryHDU(header=self.header)\n hdu_list = fits.HDUList([head_hdu, tb_hdu])\n\n print('Writing input_file_molecfit.fits ... ')\n hdu_list.writeto('input_file_molecfit.fits', clobber=True)\n\n return None\n\n def extract_range(wl, flux, wl_start, wl_end):\n \"\"\"\n Finds closest values to the wl_start and wl_end\n and return this range for the wl and the flux\n \"\"\"\n idx_start = np.argmin(np.abs(wl_start - wl))\n idx_end = np.argmin(np.abs(wl_end - wl))\n\n return wl[idx_start:idx_end], flux[idx_start:idx_end]\n\nif __name__ == \"__main__\":\n\n # Create Spectrum object from filename\n # t0 = time.time()\n\n filename = 'car-20160420T20h45m44s-sci-cabj-nir_A.fits'\n # filename2 = 'car-20160420T20h26m49s-sci-cabj-nir_A.fits'\n\n spectrum10 = Spectrum.from_file(filename)\n # spectrum11 = Spectrum.from_file(filename2)\n\n # Mask the overlaps\n wavelength_masked, flux_masked, nb_overlaps, mask = Spectrum.mask_overlap_rgn(spectrum10)\n\n # # Correct the overlaps\n wl, flux = Spectrum.treat_overlap(spectrum10, wavelength_masked, flux_masked, nb_overlaps)\n\n # t1 = time.time()\n\n # print 'Time for correcting overlap', t1-t0\n # # Delete artefact on the full spectrum\n wl_corr, flux_corr = suppress_artefact(wl, flux)\n\n # # Create input FITS file for Molecfit\n # Spectrum.input_molecfit(spectrum10, wl_corr, flux_corr)\n\n # # Create exclusion mask for Molecfit\n # Spectrum.wrange_exclude(spectrum10, wl_corr, flux_corr)\n\n # # Create inclusion mask for Molefit\n # # to fit by default H2O and CO2, O2 and CH4 can also be included\n # Spectrum.wrange_include(spectrum10, h2o=True, o2=False, co2=False, ch4=False)\n\n # Plotting\n plt.figure(1)\n for i in range(5):\n plt.plot(spectrum10.wavelength[i], spectrum10.flux[i], 'k--', label='Separated orders 1D spectrum')\n plt.legend()\n\n plt.subplot(212)\n plt.plot(wl_corr, flux_corr, 'm.-', label='Full 1D spectrum')\n plt.legend()\n\n plt.figure(2)\n plt.subplot(311)\n plt.title('Merging CARMENES 1D orders')\n plt.plot(spectrum10.wavelength[0], spectrum10.flux[0], 'b-', label='Order 0')\n plt.plot(spectrum10.wavelength[1], spectrum10.flux[1], 'g-', label='Order 1')\n plt.plot(wl_corr, flux_corr, 'm-', label='Merged spectrum')\n plt.axis([9757, 9784, 0.1, 0.75])\n plt.legend(loc='lower right')\n\n plt.subplot(312)\n plt.ylabel('Normalized Flux')\n plt.plot(spectrum10.wavelength[1], spectrum10.flux[1], 'g-', label='Order 1')\n plt.plot(spectrum10.wavelength[2], spectrum10.flux[2], 'y-', label='Order 2')\n plt.plot(wl_corr, flux_corr, 'm-', label='Merged spectrum')\n plt.axis([9917, 9941, 0.5, 0.85])\n plt.legend(loc='lower right')\n\n plt.subplot(313)\n plt.xlabel('Wavelength (Angstrom)')\n plt.plot(spectrum10.wavelength[2], spectrum10.flux[2], 'y-', label='Order 2')\n plt.plot(spectrum10.wavelength[3], spectrum10.flux[3], 'c-', label='Order 3')\n plt.plot(wl_corr, flux_corr, 'm-', label='Merged spectrum')\n plt.axis([10083, 10104, 0.5, 0.8])\n plt.legend(loc='lower right')\n\n # PHOENIX Model\n # # PHOENIX model for HD 79210\n filename = 
'/home/solene/phoenix/WAVE_PHOENIX-ACES-AGSS-COND-2011.fits'\n hdu = fits.open(filename)\n wl_phoenix = hdu[0].data\n\n filename = '/home/solene/phoenix/lte03900-4.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes.fits'\n hdu = fits.open(filename)\n flux_phoenix = hdu[0].data\n\n plt.figure(3)\n plt.title('PHOENIX model for HD79210')\n plt.plot(wl_phoenix, flux_phoenix)\n plt.axis([10000, 15000, 0.1*1e14, 15.*1e14])\n\n plt.plot(wl_corr, flux_corr*1e14, 'g.-', label='CARMENES spectrum')\n\n # Save in txt file\n\n path = \"/home/solene/atmos/carmenes/wl_flux_carmenes.txt\"\n file = open(path, 'w+')\n data = (np.array([wl_corr, flux_corr])).T # transpose data, to have it in two columns\n np.savetxt(file, data)\n file.close()\n" } ]
5
AMiller113/portfolio-project
https://github.com/AMiller113/portfolio-project
0a95185602949caf762aaeb0da341e3ade3906b4
89c222f08e006259435ae460ad90d5ffa990bfc0
cf4763c063c27c4db2536556cbf947b0b19c6fdd
refs/heads/master
2021-01-06T07:21:08.837117
2020-02-27T01:13:44
2020-02-27T01:13:44
241,244,694
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5856930017471313, "alphanum_fraction": 0.5931445360183716, "avg_line_length": 31, "blob_id": "3f7edd7fc3430dc1569b994933bdde13b2093d09", "content_id": "6e1762baee6fa23265dd4599982d9f544291a75d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 671, "license_type": "no_license", "max_line_length": 81, "num_lines": 21, "path": "/blog/models.py", "repo_name": "AMiller113/portfolio-project", "src_encoding": "UTF-8", "text": "from django.db import models\n\nclass Blog(models.Model):\n title = models.CharField(max_length=150)\n publication_date = models.DateTimeField()\n blog_post_text = models.TextField()\n blog_image = models.ImageField(upload_to='images/')\n\n def summary(self):\n index = 0\n for i, char in enumerate(self.blog_post_text):\n if i > len(self.blog_post_text)//3 and char is '.' or char is '/r/n':\n index = i\n break\n return self.blog_post_text[:index] + ' (continued....)'\n\n def pub_date_alt(self):\n return self.publication_date.strftime('%b %e, %Y')\n\n def __str__(self):\n return self.title" }, { "alpha_fraction": 0.5013054609298706, "alphanum_fraction": 0.584856390953064, "avg_line_length": 20.27777862548828, "blob_id": "b1493152979ec58e710904fd681b78064638e4d5", "content_id": "2fdb0141326e807409d466de932c66e3679e0953", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 383, "license_type": "no_license", "max_line_length": 48, "num_lines": 18, "path": "/blog/migrations/0003_auto_20200219_1542.py", "repo_name": "AMiller113/portfolio-project", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.10 on 2020-02-19 20:42\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0002_auto_20200219_1540'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='blog',\n name='publication_date',\n field=models.DateTimeField(),\n ),\n ]\n" } ]
2
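The `Blog.summary()` method stored in the record above truncates a post at the first sentence break found past one third of its length, but it compares characters with `is` rather than `==` and tests against the four-character literal `'/r/n'`, which a single character from iteration can never match. The sketch below shows the intended truncation logic as a plain function with ordinary equality tests so it runs without Django; the sample post text is invented.

```python
def summarize(blog_post_text):
    """Cut the text at the first sentence break found after one third of its
    length -- the same idea as Blog.summary() above, but using membership
    tests instead of 'is' identity checks.
    """
    cutoff = len(blog_post_text) // 3
    index = 0
    for i, char in enumerate(blog_post_text):
        if i > cutoff and char in ('.', '\n'):
            index = i
            break
    return blog_post_text[:index] + ' (continued....)'

# Invented post text, purely for illustration.
post = "First sentence. Second sentence with more words. Third sentence."
print(summarize(post))
# -> First sentence. Second sentence with more words (continued....)
```

Note that from Python 3.8 onward, CPython emits a SyntaxWarning for `is` comparisons against literals, which is one reason the equality form above is preferable.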
acnagy/test-blowdrycss
https://github.com/acnagy/test-blowdrycss
8ed14dc6cc0f7f309c5b5d233c260fb59ea9dfd1
bd9603dc87dc304b811213e3e6c3c97afa7f5966
95866cd2e4ca5fcbe2ac0da20fc4310dfdb429d0
refs/heads/master
2021-01-16T22:03:54.812624
2016-08-19T18:53:30
2016-08-19T18:53:30
66,104,906
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.552706241607666, "alphanum_fraction": 0.558066189289093, "avg_line_length": 35.316795349121094, "blob_id": "9798655a3d999347667c42585414aeb7b9483142", "content_id": "d562fe026977bf2c927c69a8c67a2923f8dc4c7b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9515, "license_type": "permissive", "max_line_length": 124, "num_lines": 262, "path": "/blowdrycss/utilities.py", "repo_name": "acnagy/test-blowdrycss", "src_encoding": "UTF-8", "text": "# python 2\nfrom __future__ import absolute_import, print_function, division, unicode_literals\nfrom builtins import str, round\n\n# builtins\nfrom re import search, findall\nfrom inspect import currentframe\nfrom os import path, stat, getcwd, makedirs, remove\nimport logging\n\n# custom\nimport blowdrycss_settings as settings\n\n\n__author__ = 'chad nelson'\n__project__ = 'blowdrycss'\n\n\ndef contains_a_digit(string=''):\n \"\"\"\n Check if string contains a digit ``[0-9]``.\n\n :type string: str\n\n :param string: The string to test.\n :return: (bool) -- Returns True if string contains at least 1 digit. Otherwise, returns False.\n\n **Examples:**\n\n >>> contains_a_digit('abc1')\n True\n >>> contains_a_digit('876')\n True\n >>> contains_a_digit('cat')\n False\n >>> contains_a_digit('')\n False\n >>> contains_a_digit(' ')\n False\n\n \"\"\"\n return True if search(r\"[0-9]\", string) else False\n\n\ndef deny_empty_or_whitespace(string='', variable_name=''):\n \"\"\"\n Prevent ``string`` or ``variable_name`` from being empty or only containing whitespace.\n\n :raises ValueError: Raises a ValueError if the string or the variable_name is empty or only contains whitespace.\n The ValueError contains the name of the calling function and the variable name used in the calling function.\n\n :type string: str\n :type variable_name: str\n\n :param string: The string to test.\n :param variable_name: The name of the variable used in the calling function.\n :return: None\n\n \"\"\"\n if not variable_name: # '' and None cases\n calling_function = currentframe().f_back.f_code.co_name\n raise ValueError(calling_function + ': variable_name input cannot be empty or None.')\n if not variable_name.strip(): # whitespace case\n calling_function = currentframe().f_back.f_code.co_name\n raise ValueError(calling_function + ': variable_name input cannot only contain whitespace.')\n\n if not string: # '' and None cases\n calling_function = currentframe().f_back.f_code.co_name\n raise ValueError(calling_function + ':', variable_name, 'cannot be empty or None.')\n if not string.strip(): # whitespace case\n calling_function = currentframe().f_back.f_code.co_name\n raise ValueError(calling_function + ':', variable_name, 'cannot only contain whitespace.')\n\n\ndef get_file_path(file_directory='', file_name='blowdry', extension=''):\n \"\"\" Joins the ``file_directory``, ``file_name``, and ``extension``. 
Returns the joined file path.\n\n **Rules:**\n\n - Do not allow ``''`` empty input for ``file_directory``, ``file_name``, or ``extension``.\n - Transform extension to lowercase.\n - Extensions must match this regex r\"(^[.][.0-9a-z]*[0-9a-z]$)\".\n\n **Findall regex Decoded:**\n\n - ``r\"(^[.][.0-9a-z]*[0-9a-z]$)\"``\n - ``^[.]`` -- ``extension`` must begin with a ``.`` dot.\n - ``[.0-9a-z]*`` -- ``extension`` may contain any of the character inside the brackets.\n - ``[0-9a-z]$`` -- ``extension`` may only end with the characters inside the brackets.\n\n :type file_directory: str\n :type file_name: str\n :type extension: str\n\n :param file_directory: Directory in which to place the file.\n :param file_name: Name of the file (excluding extension)\n :param extension: A file extension including the ``.``, for example, ``.css``, ``.min.css``, ``.md``,\n ``.html``, and ``.rst``\n :return: (*str*) -- Returns the joined file path.\n\n \"\"\"\n deny_empty_or_whitespace(string=file_directory, variable_name='file_directory')\n deny_empty_or_whitespace(string=file_name, variable_name='file_name')\n\n extension = extension.lower()\n regex = r\"(^[.][.0-9a-z]*[0-9a-z]$)\"\n if len(findall(regex, extension)) == 1:\n return path.join(file_directory, file_name + extension)\n else:\n raise ValueError(\n 'Extension: ' + extension + ' contains invalid characters. Only \".\", \"0-9\", and \"a-z\" are allowed.'\n )\n\n\ndef change_settings_for_testing():\n \"\"\" Change settings directories for testing.\n\n .. warning::\n\n This method should only be used by the unit_test framework.\n\n :return: None\n\n \"\"\"\n cwd = getcwd()\n\n # The if/else logic is required for unit testing.\n if cwd.endswith('unit_tests'): # Allows running of pycharm unittest.\n settings.markdown_directory = path.join(cwd, 'test_markdown')\n settings.project_directory = path.join(cwd, 'test_examplesite')\n settings.css_directory = path.join(settings.project_directory, 'test_css')\n settings.docs_directory = path.join(cwd, 'test_docs')\n else: # Run unittest cmd from the root directory.\n settings.markdown_directory = path.join(cwd, 'blowdrycss', 'unit_tests', 'test_markdown')\n settings.project_directory = path.join(cwd, 'blowdrycss', 'unit_tests', 'test_examplesite')\n settings.css_directory = path.join(settings.project_directory, 'test_css')\n settings.docs_directory = path.join(cwd, 'blowdrycss', 'unit_tests', 'test_docs')\n\n\ndef unittest_file_path(folder='', filename=''):\n \"\"\" Determines the path of assigned to the folder and file based on the directory in which the unittest command\n is executed.\n\n :type folder: str\n :type filename: str\n\n :param folder: Name of the folder where the file is located.\n :param filename: Name of the file including extension e.g. test_aspx.aspx\n\n :return: (*str*) -- Return the path of the file to test.\n\n \"\"\"\n cwd = getcwd()\n\n if cwd.endswith('unit_tests'): # Allows running of pycharm unittest.\n the_path = path.join(cwd, folder, filename)\n else: # Run unittest cmd from the root directory.\n the_path = path.join(cwd, 'blowdrycss', 'unit_tests', folder, filename)\n\n return the_path\n\n\ndef print_css_stats(file_name=''):\n \"\"\" ``file_name`` the full file_name excluding extension e.g. 
'blowdry' or 'site'.\n Assumes that the extensions to append to the file_name are '.css' and '.min.css'.\n Print the size of a file_name.\n\n :type file_name: str\n :param file_name: Name of the CSS files.\n :return: None\n\n \"\"\"\n css_file = file_name + '.css'\n min_file = file_name + '.min.css'\n\n css_dir = path.join(settings.css_directory, css_file) # Get full file path.\n min_dir = path.join(settings.css_directory, min_file)\n\n css_size = stat(css_dir).st_size # Get file size in Bytes.\n min_size = stat(min_dir).st_size\n\n try:\n percent_reduced = round( # Calculate percentage size reduced.\n float(100) - float(min_size) / float(css_size) * float(100),\n 1 # Precision\n )\n except ZeroDivisionError:\n percent_reduced = round(0.0, 1)\n\n css_kb = round(float(css_size) / float(1000), 1) # Convert to kiloBytes.\n min_kb = round(float(min_size) / float(1000), 1)\n\n css_stats = (\n '\\n' + str(css_file) + ':\\t ' + str(css_kb) + 'kB\\n' +\n str(min_file) + ': ' + str(min_kb) + 'kB\\n' +\n 'CSS file size reduced by ' + str(percent_reduced) + '%.'\n )\n logging.debug(css_stats)\n print(css_stats)\n\n\ndef print_blow_dryer():\n \"\"\" Prints an image of a blow dryer using ASCII.\n\n `A nice png to ascii converter <http://picascii.com>`__\n\n :return: None\n\n \"\"\"\n blow_dryer_ascii = \"\"\"\n .-'-.\n ;@@@@@@@@@'\n ~~~~ ;@@@@@@@@@@@@@@@@@@@+`\n ~~~~ ;@@@@@@@@@@@@@``@@@@@@\n +@@@@@` `@@@@@'\n @@@@``@@@@@\n .-@@@@@@@+\n @@@@@\n .@@@.\n `@@@.\n \"\"\"\n print(str(blow_dryer_ascii))\n\n\ndef make_directory(directory=''):\n \"\"\" Try to make a directory or verify its' existence. Raises an error if neither of these are possible.\n\n :raise OSError: Raises an OSError if the directory cannot be made or found.\n\n :type directory: str\n\n :param directory: A directory path in the file system.\n\n :return: None\n\n \"\"\"\n try: # Python 2.7 Compliant\n makedirs(directory) # Make 'log' directory\n logging.debug('%s created.', directory)\n except OSError:\n if not path.isdir(directory): # Verify directory existence\n raise OSError(directory + ' is not a directory, and could not be created.')\n\n\ndef delete_file_paths(file_paths):\n \"\"\" Delete all file_paths. Use Caution.\n\n Note::\n\n Ignores files that do not exist.\n\n :type file_paths: iterable of strings\n\n :param file_paths: An iterable containing file path strings.\n :return: None\n\n \"\"\"\n for file_path in file_paths:\n try:\n remove(file_path)\n except:\n pass\n" }, { "alpha_fraction": 0.6664285063743591, "alphanum_fraction": 0.6856300830841064, "avg_line_length": 35.957096099853516, "blob_id": "b68940e56cf78ae731d290696152cd03f7406675", "content_id": "e308c89023b66458d3fb045fae7dc9546dd71a71", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 11197, "license_type": "permissive", "max_line_length": 218, "num_lines": 303, "path": "/readme.rst", "repo_name": "acnagy/test-blowdrycss", "src_encoding": "UTF-8", "text": "Read Me\n=======\n\n.. image:: https://img.shields.io/pypi/v/blowdrycss.svg?maxAge=2592000?style=plastic :target: https://pypi.python.org/pypi/blowdrycss\n\n.. image:: https://img.shields.io/travis/nueverest/blowdrycss.svg?maxAge=2592000 :target: https://travis-ci.org/nueverest/blowdrycss\n\n.. 
image:: https://img.shields.io/coveralls/nueverest/blowdrycss.svg?maxAge=2592000 :target: https://coveralls.io/github/nueverest/blowdrycss\n\n|\n\n`blowdrycss` is a rapid styling tool that compiles DRY CSS from encoded class selectors in your web project files.\n\n\nGetting Started\n~~~~~~~~~~~~~~~\n\n`Quick Start Docs <http://blowdrycss.readthedocs.io/en/latest/quickstart.html>`__\n\n`Official site blowdrycss.org <http://blowdrycss.org>`__\n\n`Full documentation <http://blowdrycss.readthedocs.io/en/latest/index.html>`__\n\n\nVersion Changelog\n~~~~~~~~~~~~~~~~~\n\n| See ``version.py`` for full changelog.\n|\n| **0.2.6** -- Created a filehandler.FileModificationComparator which runs under watchdog mode. This\n feature dramatically improves efficiency by only adding classes based on the files that changed\n before the last run of blowdrycss. The current CSS class selectors are now stored within the\n scope of the watchdog wrapper.\n\n A LimitTimer expires periodically (default is 30 minutes). The expiration triggers a parses\n of all files to delete unused classes.\n\n For those upgrading the package be sure to add ``time_limit = 1800`` to your current ``blowdrycss_settings.py``.\n\n Class selectors that were deleted by the user during file\n modification are temporarily ignored since all eligible files (including the ones\n not modified) would need to be parsed before deletion should be authorized. Deletion and full,\n comprehensive scans of all files now occurs every 1800 seconds (30 minutes). This value can be\n increased or decreased in the settings file by changing ``time_limit``.\n\n Added basic high-level design files.\n\n Force pypandoc==1.1.3 since pandoc doesn't properly install on Windows in version 1.2.\n\n Commented out pip and setuptools from requirements.txt.\n\n PEP8 and typo corrections.\n\n| **0.2.7** -- Added a call to LimitTimer.reset() to fix a bug in which the LimitTimer never expired.\n Add two more color regexes which allow the case in which hex is be combined with a pseudo class.\n e.g. ``hffffff-hover`` or ``hfff-before``.\n\n\nWhy the name blowdrycss?\n~~~~~~~~~~~~~~~~~~~~~~~~\n\nInspiration for the name came from the blow dryer. A blow dryer rapidly drys and styles hair.\n\nSimilarly, ``blowdrycss`` is used to rapidly style HTML and generate DRY CSS files using encoded class names.\n\n\nExample Usage in HTML Tags:\n~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n**Use the CSS level 1, 2.1, and 3 syntax that you already know.**\n\n.. code:: html\n\n <div class=\"text-align-center margin-top-30\">\n <p class=\"font-size-25\">\n The font-size is 25px. <span class=\"green\">Green Text</span>\n </p>\n </div>\n\n``blowdrycss`` decodes the class names ``text-align-center``,\n``margin-top-30``, ``font-size-25``, and ``green``; and generates the\nfollowing atomic CSS in ``blowdry.css``:\n\n::\n\n .text-align-center { text-align: center }\n .margin-top-30 { margin-top: 30px }\n .font-size-25 { font-size: 25px }\n .green { color: green }\n\nAdvantages of blowdrycss\n~~~~~~~~~~~~~~~~~~~~~~~~\n\n#. **Rapid Development:** Less time spent writing CSS, and cleaning up unused style rules.\n#. **DRY (Don't Repeat Yourself):** Reduces CSS file size by only defining properties once.\n#. **Symbiotic:**\n * Can be integrated with the current ecosystem of CSS compilers and frameworks.\n Compatible with SASS, SCSS, PostCSS, LESS, Foundation, Bootstrap.\n * Supports class selector discovery within HTML, JINJA, XHTML, .NET, Ruby ERB Templates, Javascript, and C#.\n#. 
**Documented:** Hands-on `tutorial <http://blowdrycss.readthedocs.io/en/latest/quickstart.html>`__ and sphinx `documentation <http://blowdrycss.readthedocs.io/en/latest/index.html>`__ to get you up and running fast.\n#. **Robust:** Built for the real world in which deadlines and division of labor is not always taken into account. Can be used across all phases of a products lifecycle from prototype to production.\n#. **Customizable:** Features can be turned on and off inside of `blowdrycss_settings.py <https://github.com/nueverest/blowdrycss/blob/master/blowdrycss/blowdrycss_settings.py>`__. Examples include:\n * Watchdog file monitoring\n * Logging\n * Unit parsing\n * Color parsing\n * Font parsing\n * CSS Minification\n * Media query parsing.\n#. **Atomic:** Generates atomic CSS declarations.\n#. **Standardized:** HTML5 compatible. All `W3C CSS <http://www.w3.org/Style/CSS/Overview.en.html>`__ Level 2.1, and Level 3 properties implemented. PEP8 Compliant.\n#. **Tested:** UnitTest Coverage\n#. **Permissive:** `MIT license <https://github.com/nueverest/blowdrycss/blob/master/LICENSE>`__\n\nRequirements\n~~~~~~~~~~~~\n\n- `Python 2.7.x or 3.3+ <https://www.python.org/downloads/>`__ (required)\n- `cssutils 1.0.1+ <https://bitbucket.org/cthedot/cssutils>`__ (required)\n- `future 0.15.2+ <https://pypi.python.org/pypi/future>`__ (required - for Python 2.7)\n- `pandoc <https://pypi.python.org/pypi/pypandoc/1.1.3#installing-pandoc>`__ (required - file type conversion)\n- `pypandoc 1.1.2+ <pypi.python.org/pypi/pypandoc/1.1.3>`__ (required - file type conversion)\n- `watchdog 0.8.2+ <https://pypi.python.org/pypi/watchdog/0.8.3>`__ (required - monitor directory and auto-generate CSS)\n\nOptional\n''''''''\n\n- tox 2.3.1+ (Multi-environment testing)\n- `tox-travis 0.4+ <https://pypi.python.org/pypi/tox-travis>`__ (Allows tox to be used on Travis CI.)\n- coverage 4.0.2+ (Check test coverage)\n- `coveralls 1.1+ <https://github.com/coagulant/coveralls-python>`__ (Used to report coverage when tox is run via Travis CI.)\n- sphinx 1.3.3+ (docs)\n\nPre-Requisite Knowledge\n~~~~~~~~~~~~~~~~~~~~~~~\n\n- Basic HTML and CSS\n- Zero programming experience required.\n\nMotivation\n~~~~~~~~~~\n\nThis tool was created after seeing how many companies manage their CSS files. The following are some scenarios:\n\nScenario 1 - WET (Write Everything Twice) CSS\n'''''''''''''''''''''''''''''''''''''''''''''\n\nInside a CSS file you find the following:\n\n.. code:: css\n\n .header-1 { font-weight: bold; font-size: 12px; font-color: red; }\n .header-2 { font-weight: bold; font-size: 16px; font-color: blue; }\n .header-3 { font-weight: bold; font-size: 12px; font-color: green; }\n\nThe property ``font-weight: bold;`` appears three times, and\n``font-size: 12px;`` appears twice. This is not DRY (Don't Repeat\nYourself).\n\nScenario 2 - Stale or Unused CSS\n''''''''''''''''''''''''''''''''\n\nInside a CSS file you find the following:\n\n.. code:: css\n\n .banner-video {\n position: absolute;\n top: 48%;\n left: 50%;\n min-width: 100%;\n min-height: 100%;\n /*width: auto;*/\n /*max-height: 30.5em;*/\n z-index: -100;\n transform: translateX(-50%) translateY(-50%);\n background-color: rgba(0,0,0,1);\n background-size: contain;\n transition: 1s opacity;\n }\n\nSix months later the person who wrote this CSS is then asked to remove\nbanner-video from the homepage. More often than not the\nfront-end developer will remove the CSS class from the HTML file, but\nnot from the CSS file. 
This leaves unused CSS lurking in the project.\n\nReasons include:\n^^^^^^^^^^^^^^^^\n\n- Forgetting to delete the rule from the CSS file.\n- Fear that the class is used somewhere else and that it might break\n the site.\n- Being too busy to search all of the files in their project for other\n potential use cases.\n\nNow 326 bytes worth of stale CSS data lurks in the style files.\n\nScenario 3 - Modern CSS Pre-compiler:\n'''''''''''''''''''''''''''''''''''''\n\nCSS compilation with SASS/SCSS, PostCSS, or LESS is awesome, and makes\nwriting lots of CSS rules easy. Tools like these allow auto-generation\nof hundreds of header rules like the ones above. If care is not taken\nthis leverage can rapidly grow the CSS file.\n\nSCSS Mixin example from a recent project:\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n.. code:: css\n\n @mixin text($font-color, $font-size, $font-family:\"Open Sans\", $line-height:inherit) {\n color: $font-color;\n font-size: $font-size;\n font-family: $font-family, $default-font-family;\n line-height: $line-height;\n }\n\nThis mixin is called using ``@include`` as follows:\n\n.. code:: css\n\n @include text($color-blue, rem-calc(14px), $default-font-family);\n\nIt turns out that ``@include text(...)`` is called 627 times in our\nSCSS. Most of these ``@include`` statements include at least one\nmatching input parameter resulting in thousands of duplicate CSS\nproperties.\n\nAuto-generating ``font-size: 1rem;`` 500 times is now super easy with a\npre-compiler and a for-loop. Some might say, ::\n\n Well we minified it to save space.\n\nYes but, ::\n\n Why did you write the same property 500 times in your main CSS file?\n\nCSS File size does matter. For consideration:\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n- Longer download times increase user bounce rates especially on mobile\n devices.\n- Data pollution on the Internet.\n- Increased likelihood of style bugs.\n- Increased time required to implement new changes and\n deprecate features.\n\nWhat it is not\n~~~~~~~~~~~~~~\n\nThis tool is not designed to replace the need to hand-craft complex CSS property or rule declarations.\n\n* Custom CSS would need to be written for Multi-rule classes, Background images, url() values, multi-word fonts, and some shorthand properties.\n\nThe following is an example of something this tool in not intended to\ngenerate, and something that still needs to be written by hand.\n\n.. 
code:: css\n\n .home-banner {\n background: url(\"https://somewhere.net/images/banner/home-mainbanner-bg.jpg\") no-repeat;\n font-family: \"Open Sans\",\"Source Sans Pro\",Arial;\n background-repeat: no-repeat;\n background-size: cover;\n min-height: 7rem;\n font-weight: bold;\n font-size: 3.5625rem;\n color: white;\n line-height: 3.6875rem;\n text-align: center;\n text-shadow: -2px 2px 4px rgba(0,0,0,0.5);\n }\n\nValuable References\n~~~~~~~~~~~~~~~~~~~\n\n `Blowdrycss Documentation <http://blowdrycss.readthedocs.io/en/latest/index.html>`__\n\n `Github Repo <https://github.com/nueverest/blowdrycss>`__\n\n `Slides presented at DessertPy <https://docs.google.com/presentation/d/1wjkbvQUorD9rzdPWjwPXaJcYPOBnrjE1qUJY2M4xwuY/edit#slide=id.gc6f8badac_0_0>`__\n\n `W3C Full CSS property table <http://www.w3.org/TR/CSS21/propidx.html>`__\n\n `Don't Repeat Yourself <https://en.wikipedia.org/wiki/Don%27t_repeat_yourself>`__\n\n `Download Python <https://www.python.org/downloads/>`__\n\n `cssutils 1.0.1+ <https://bitbucket.org/cthedot/cssutils>`__\n\n `watchdog 0.8.2+ <https://pypi.python.org/pypi/watchdog/0.8.3>`__\n\nLicense\n~~~~~~~\n\n The `MIT license <https://github.com/nueverest/blowdrycss/blob/master/LICENSE>`__\n\nHow to Contribute\n~~~~~~~~~~~~~~~~~\n\n- Open an Issue first and get community buy-in.\n- Write Code\n- Write Unit Tests (All tests must pass. 100% coverage preferred.)" }, { "alpha_fraction": 0.610837459564209, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 13.535714149475098, "blob_id": "308797b7c0c2552825eed614e28e447b3c5800a5", "content_id": "0c3b98b715235c0298a6f22dba70151a61aba9e9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 406, "license_type": "permissive", "max_line_length": 41, "num_lines": 28, "path": "/travis_requirements.txt", "repo_name": "acnagy/test-blowdrycss", "src_encoding": "UTF-8", "text": "# Installation and distribution\npip>=8.1.1\nsetuptools>=19.6.2\n\n# CSS Creation\ncssutils>=1.0.1\n\n# Markdown to reStructuredText Conversion\npypandoc>=1.1.2\n\n# Python 2.7 Support\nfuture>=0.15.2\n\n# Automatic CSS generation on save\nwatchdog>=0.8.3\n\n# Documentation\n# sphinx>=1.3.5\n\n# Tox and Travis Interface\ntox>=2.3.1\ntox-travis>=0.4\n\n# Development\ncoverage>=4.0.2\ncoveralls>=1.1\n# wheel>=0.26.0\n# twine>=1.6.5" }, { "alpha_fraction": 0.4838709533214569, "alphanum_fraction": 0.6935483813285828, "avg_line_length": 14.75, "blob_id": "fc0bdeec4a9048d8878cda0bd7fbcb8df95936e2", "content_id": "4a488237a234f3d0d44f9af2ad3debea84f88a3a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 62, "license_type": "permissive", "max_line_length": 15, "num_lines": 4, "path": "/rtd_requirements.txt", "repo_name": "acnagy/test-blowdrycss", "src_encoding": "UTF-8", "text": "cssutils>=1.0.1\npypandoc==1.1.3\nfuture>=0.15.2\nwatchdog>=0.8.3" }, { "alpha_fraction": 0.5713943243026733, "alphanum_fraction": 0.578581690788269, "avg_line_length": 35.77092361450195, "blob_id": "42a48c61276db9f6958085c7555ed213d857207a", "content_id": "dcdbbfd8713ca5d63a38b2cf23ccd1bce8d78a2f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8348, "license_type": "permissive", "max_line_length": 120, "num_lines": 227, "path": "/blowdrycss/unit_tests/test_watchdogwrapper_main.py", "repo_name": "acnagy/test-blowdrycss", "src_encoding": "UTF-8", "text": "# python 2\nfrom __future__ import 
absolute_import, print_function, unicode_literals, with_statement\nimport _thread\n\n# builtins\nfrom unittest import TestCase, main\nimport logging\nimport sys\nfrom io import StringIO, open\nfrom time import sleep\nfrom os import path, remove\n\n# plugins\nfrom blowdrycss.utilities import unittest_file_path, change_settings_for_testing, make_directory\nfrom blowdrycss import watchdogwrapper\nimport blowdrycss_settings as settings\n\nchange_settings_for_testing()\n\n\nclass TestWatchdogWrapperMain(TestCase):\n passing = True\n non_matching = ''\n output = ''\n\n def monitor_modify_delete_stop(self, file_path):\n \"\"\" Monitor console output. Modify the file to trigger watchdog on_modified().\n Delete file at file_path_to_delete. Wait for output. Stop watchdogwrapper.main()\n Reference: http://stackoverflow.com/questions/7602120/sending-keyboard-interrupt-programmatically\n\n \"\"\"\n substrings = [\n '~~~ blowdrycss started ~~~',\n 'Auto-Generated CSS',\n 'Completed',\n 'blowdry.css',\n 'blowdry.min.css',\n ]\n\n saved_stdout = sys.stdout # Monitor console\n try:\n out = StringIO()\n sys.stdout = out\n\n # Wait for main() to start.\n while 'Ctrl + C' not in out.getvalue():\n sleep(0.05)\n\n # Modify file\n with open(file_path, 'w') as generic_file:\n generic_file.write('<html></html>')\n\n # Delete the file.\n remove(file_path)\n\n # IMPORTANT: Must wait up to 5 seconds for output otherwise test will fail.\n count = 0\n while substrings[-1] not in out.getvalue():\n if count > 100: # Max wait is 5 seconds = 100 count * 0.05 sleep\n break\n else:\n sleep(0.05)\n count += 1\n\n output = out.getvalue()\n\n for substring in substrings:\n if substring not in output:\n self.passing = False\n self.non_matching = substring\n self.output = output\n self.assertTrue(substring in output, msg=substring + '\\noutput:\\n' + output)\n finally:\n sys.stdout = saved_stdout\n _thread.interrupt_main() # Stop watchdogwrapper.main().\n\n def monitor_limit_expires_stop(self):\n \"\"\" Monitor console output. Wait for output based on LimitTimer expiration. 
Stop watchdogwrapper.main()\n Reference: http://stackoverflow.com/questions/7602120/sending-keyboard-interrupt-programmatically\n\n \"\"\"\n substrings = [\n '~~~ blowdrycss started ~~~',\n 'Auto-Generated CSS',\n 'Completed',\n 'blowdry.css',\n 'blowdry.min.css',\n '----- Limit timer reset -----',\n ]\n\n saved_stdout = sys.stdout # Monitor console\n try:\n out = StringIO()\n sys.stdout = out\n\n # Wait for main() to start.\n while 'Ctrl + C' not in out.getvalue():\n sleep(0.05)\n\n # IMPORTANT: Must wait up to 5 seconds for output otherwise test will fail.\n count = 0\n while substrings[-1] not in out.getvalue():\n if count > 100: # Max wait is 5 seconds = 100 count * 0.05 sleep\n break\n else:\n sleep(0.05)\n count += 1\n\n output = out.getvalue()\n\n for substring in substrings:\n if substring not in output:\n self.passing = False\n self.non_matching = substring\n self.output = output\n self.assertTrue(substring in output, msg=substring + '\\noutput:\\n' + output)\n finally:\n sys.stdout = saved_stdout\n _thread.interrupt_main() # Stop watchdogwrapper.main().\n\n def test_main_auto_generate_True_on_modify(self):\n # Integration test\n logging.basicConfig(level=logging.DEBUG)\n html_text = '<html></html>'\n test_examplesite = unittest_file_path(folder='test_examplesite')\n delete_dot_html = unittest_file_path(folder='test_examplesite', filename='delete.html')\n\n # Directory must be created for Travis CI case\n make_directory(test_examplesite)\n self.assertTrue(path.isdir(test_examplesite))\n\n # Create file delete.html\n with open(delete_dot_html, 'w') as _file:\n _file.write(html_text)\n\n # Double check to ensure it got created.\n self.assertTrue(path.isfile(delete_dot_html))\n\n auto_generate = settings.auto_generate # original\n settings.auto_generate = True\n _thread.start_new_thread(self.monitor_modify_delete_stop, (delete_dot_html,))\n watchdogwrapper.main() # Caution: Nothing will run after this line unless _thread.interrupt_main() is called.\n self.assertTrue(self.passing, msg=self.non_matching + ' not found in output:\\n' + self.output)\n settings.auto_generate = auto_generate # reset setting\n\n def test_main_auto_generate_True_limit_timer_expired(self):\n # Integration test\n logging.basicConfig(level=logging.DEBUG)\n html_text = '<html><div class=\"blue\"></div></html>'\n test_examplesite = unittest_file_path(folder='test_examplesite')\n limit_dot_html = unittest_file_path(folder='test_examplesite', filename='limit_expired.html')\n\n # Directory must be created for Travis CI case\n make_directory(test_examplesite)\n self.assertTrue(path.isdir(test_examplesite))\n\n # Create file delete.html\n with open(limit_dot_html, 'w') as _file:\n _file.write(html_text)\n\n # Double check to ensure it got created.\n self.assertTrue(path.isfile(limit_dot_html))\n\n auto_generate = settings.auto_generate # original\n time_limit = settings.time_limit\n\n settings.auto_generate = True\n settings.time_limit = 0.1 # reduce the time_limit\n _thread.start_new_thread(self.monitor_limit_expires_stop, ())\n watchdogwrapper.main() # Caution: Nothing will run after this line unless _thread.interrupt_main() is called.\n self.assertTrue(self.passing, msg=self.non_matching + ' not found in output:\\n' + self.output)\n\n remove(limit_dot_html) # delete files\n settings.auto_generate = auto_generate # reset setting\n settings.time_limit = time_limit\n\n def test_main_auto_generate_False(self):\n # Integration test\n logging.basicConfig(level=logging.DEBUG)\n substrings = [\n '~~~ blowdrycss started 
~~~',\n 'Auto-Generated CSS',\n 'Completed',\n 'blowdry.css',\n 'blowdry.min.css',\n ]\n html_text = '<html></html>'\n test_examplesite = unittest_file_path(folder='test_examplesite')\n delete_dot_html = unittest_file_path(folder='test_examplesite', filename='delete.html')\n auto_generate = settings.auto_generate # original\n\n # Directory must be created for Travis CI case\n if not path.isdir(test_examplesite):\n make_directory(test_examplesite)\n\n self.assertTrue(path.isdir(test_examplesite))\n\n # Create delete.html\n with open(delete_dot_html, 'w') as _file:\n _file.write(html_text)\n\n self.assertTrue(path.isfile(delete_dot_html))\n\n saved_stdout = sys.stdout\n try:\n out = StringIO()\n sys.stdout = out\n\n\n settings.auto_generate = False\n watchdogwrapper.main()\n\n remove(delete_dot_html) # Delete delete.html\n\n sleep(0.25) # IMPORTANT: Must wait for output otherwise test will fail.\n\n output = out.getvalue()\n\n for substring in substrings:\n self.assertTrue(substring in output, msg=substring + '\\noutput:\\n' + output)\n finally:\n sys.stdout = saved_stdout\n settings.auto_generate = auto_generate # reset setting\n\n\nif __name__ == '__main__':\n main()\n\n" } ]
5
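The readme in the record above shows how blowdrycss turns encoded class selectors such as `font-size-25` and `green` into atomic CSS rules. The sketch below only illustrates that decoding idea and is not the library's actual parser; the property whitelist, the keyword map, and the `px` default for bare numbers are assumptions made here for the example.

```python
import re

# Toy property whitelist and keyword map -- assumptions made for this sketch;
# the real tool recognises the full W3C property table, units, colors, etc.
KNOWN_PROPERTIES = {'font-size', 'margin-top', 'text-align'}
KEYWORD_CLASSES = {'green': ('color', 'green')}

def decode_class(encoded):
    """Decode one encoded class selector into an atomic CSS rule string."""
    if encoded in KEYWORD_CLASSES:
        prop, value = KEYWORD_CLASSES[encoded]
        return '.{0} {{ {1}: {2} }}'.format(encoded, prop, value)
    # Try progressively shorter hyphenated prefixes as the property name;
    # whatever remains is the encoded value.
    parts = encoded.split('-')
    for cut in range(len(parts) - 1, 0, -1):
        prop = '-'.join(parts[:cut])
        value = '-'.join(parts[cut:])
        if prop in KNOWN_PROPERTIES:
            if re.fullmatch(r'\d+', value):
                value += 'px'              # bare numbers default to px here
            return '.{0} {{ {1}: {2} }}'.format(encoded, prop, value)
    return None                            # not an encoded selector

for cls in ('text-align-center', 'margin-top-30', 'font-size-25', 'green'):
    print(decode_class(cls))
# .text-align-center { text-align: center }
# .margin-top-30 { margin-top: 30px }
# .font-size-25 { font-size: 25px }
# .green { color: green }
```

The changelog above mentions encodings such as `hffffff-hover`, which combine colors with pseudo-classes; handling those is one reason the real parser is far more involved than this sketch.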
zhongh3/redmart_puzzles
https://github.com/zhongh3/redmart_puzzles
bd7f0cff97f211f7b05c14a49d3b6b672fddfbf2
2749f8f39476e2663438080e4480a9adbcf6b296
5ab42093ede99eae7ca5fd06f5f0b4875cf67e97
refs/heads/master
2020-03-27T13:24:51.158095
2018-09-17T10:26:08
2018-09-17T10:26:08
146,607,701
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5888022184371948, "alphanum_fraction": 0.5955331325531006, "avg_line_length": 39.33333206176758, "blob_id": "a56241af8aca18872dffad952c9d96357a624e40", "content_id": "26a5495ac361b58b6a9ea7ad48037efc1e06f8ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6537, "license_type": "no_license", "max_line_length": 111, "num_lines": 162, "path": "/ski.py", "repo_name": "zhongh3/redmart_puzzles", "src_encoding": "UTF-8", "text": "import logging\n# change logging level from INFO to DEBUG to print debugging logs\nlogging.basicConfig(level=logging.INFO, format='%(levelname)s - %(funcName)s - %(lineno)d - %(message)s')\n\n\nclass Area:\n # an Area is a point on the Map\n def __init__(self, x=-1, y=-1, height=-1):\n # the location coordinates (x, y) corresponding to the\n # row (x) and column (y) numbers on the Map (both 0-indexed)\n self.x = x\n self.y = y\n\n # height: corresponding to the value read from the Map\n self.height = height\n\n # b_visited: a boolean, keeping track of whether the Area has been visited\n self.b_visited = False\n\n # path_length: the longest length starting from the current Area\n # the default value '0' indicates that the Area hasn't been visited\n # the min path length of an visited Area is \"1\", which means it's the lowest among the neighbours,\n # it can only visit itself\n self.path_length = 0\n\n # bottom_height: the height of the lowest Area that can be visited from the current Area\n # i.e. the end of the path starting from current Area\n # the default value '-1' indicates that the Area hasn't been visited\n self.bottom_height = -1\n\n def __str__(self):\n return \"Area ({}, {}) - Height={}, Visited?={}, Path Length={}, Bottom Height={}\"\\\n .format(self.x, self.y, self.height, self.b_visited, self.path_length, self.bottom_height)\n\n def update_parameters(self, new_length, new_bottom):\n # update both path length and bottom height if the new path length is longer\n # when the old and new path lengths are equal, break tie with smaller bottom height\n if self.path_length < new_length:\n self.path_length = new_length\n self.bottom_height = new_bottom\n elif self.path_length == new_length:\n self.bottom_height = min(new_bottom, self.bottom_height)\n\n\ndef prepare_map(file_name):\n # parse input file to get the height of each Area\n logging.info(\"Input File: \\\"./{}\\\"\".format(file_name))\n\n # open the input file for reading\n file = open(file_name, \"r\")\n\n # the 1st line of the input file indicates the size of the map (row and column)\n row, column = map(int, file.readline().strip().split(\" \"))\n\n logging.info(\"Map Size: row = {}, column = {}\".format(row, column))\n\n # ski_map is a list of lists where each entry is an Area object\n # the indexes of the entry denotes the location on the map\n # i.e. area = ski_map[i][j] ==> area.x = i; area.y = j\n ski_map = []\n\n for i in range(row):\n input_row = list(map(int, file.readline().strip().split(\" \")))\n\n if len(input_row) != column:\n raise Exception(\"Line {} of input file has incorrect no. 
of values.\".format(i + 2))\n\n area_row = []\n for j in range(column):\n # range check for height -> [0, 1500]\n if input_row[j] < 0 or input_row[j] > 1500:\n raise Exception(\"Area({}, {}) - Height = {} is out of range\".format(i, j, input_row[j]))\n area_row.append(Area(i, j, input_row[j]))\n\n ski_map.append(area_row)\n\n file.close()\n\n return ski_map, row, column\n\n\ndef main():\n def visit_area(area):\n\n def compare_with_neighbour(area, neighbour):\n # update path_length and bottom_height of current area after comparing with neighbour area\n if area.height <= neighbour.height:\n # the base case:\n # the new path length is '1' (the current area itself)\n # the new bottom height is the height of the current area itself\n area.update_parameters(1, area.height)\n else:\n visit_area(neighbour) # recursive call\n new_length = neighbour.path_length + 1\n new_bottom = neighbour.bottom_height\n area.update_parameters(new_length, new_bottom)\n\n # if an Area has already been visited, the parameters are up-to-date\n if area.b_visited:\n return\n\n # start to visit neighbours in clockwise order: N -> E -> S -> W\n # 1. North\n if area.x == 0: # there's no neighbour to the North of current area -> the base case\n area.update_parameters(1, area.height)\n else:\n neighbour = ski_map[area.x - 1][area.y]\n compare_with_neighbour(area, neighbour)\n\n # 2. East\n if area.y == (column - 1): # there's no neighbour to the East of current area -> the base case\n area.update_parameters(1, area.height)\n else:\n neighbour = ski_map[area.x][area.y + 1]\n compare_with_neighbour(area, neighbour)\n\n # 3. South\n if area.x == (row - 1): # there's no neighbour to the South of current area -> the base case\n area.update_parameters(1, area.height)\n else:\n neighbour = ski_map[area.x + 1][area.y]\n compare_with_neighbour(area, neighbour)\n\n # 4. 
West\n if area.y == 0: # there's no neighbour to the West of current area -> the base case\n area.update_parameters(1, area.height)\n else:\n neighbour = ski_map[area.x][area.y - 1]\n compare_with_neighbour(area, neighbour)\n\n # all neighbours have been visited, the current area has been updated/visited as well\n area.b_visited = True\n\n return area.height - area.bottom_height\n\n # ski_map is a list of lists with size row x column, where each entry is an Area\n # the indexes of each Area in ski_map is their location on the map\n ski_map, row, column = prepare_map(\"map.txt\")\n\n # the minimum of max_length should be '1' and max_drop should be '0'\n # init both to negative values and update them accordingly\n max_length = -1\n max_drop = -1 # the max_drop is not the maximum drop on the map, it's the drop of the path with max_length\n\n for i in range(row):\n for j in range(column):\n area = ski_map[i][j]\n drop = visit_area(area)\n if max_length < area.path_length:\n logging.debug(area)\n max_length = area.path_length\n max_drop = drop\n elif max_length == area.path_length:\n # break tie with larger drop\n logging.debug(area)\n max_drop = max(drop, max_drop)\n\n print(\"Results: max length = {}, drop = {}\".format(max_length, max_drop))\n\n\nif __name__ == \"__main__\":\n main()\n\n\n\n" }, { "alpha_fraction": 0.5771301984786987, "alphanum_fraction": 0.5888632535934448, "avg_line_length": 40.759037017822266, "blob_id": "0157c5aa3a83d3f815e922dc8645061b88897ccf", "content_id": "b22c8161869d899ebb18975e7dc1125c4602a812", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10400, "license_type": "no_license", "max_line_length": 119, "num_lines": 249, "path": "/prize.py", "repo_name": "zhongh3/redmart_puzzles", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport csv\nfrom enum import IntEnum\n\nimport logging\n# change logging level from INFO to DEBUG to print debugging logs\nlogging.basicConfig(level=logging.INFO, format='%(levelname)s - %(funcName)s - %(lineno)d - %(message)s')\n\n\nclass Idx(IntEnum):\n p_id = 0\n price = 1\n length = 2\n width = 3\n height = 4\n weight = 5\n volume = 6\n unit_price = 7\n\n\n# The tote’s usable space is 45 centimeters long, 30 wide and 35 high\ntote_volume = 45 * 30 * 35 # 47250 cm3\n\n# To control the number of rows to read from the input file (for debugging use)\nnum_rows = None # set to None to read the whole file\n\n\nclass Product:\n def __init__(self, record=None):\n self.p_id = int(record.iloc[Idx.p_id]) # product id\n self.value = int(record.iloc[Idx.price]) # price (cent)\n self.weight = int(record.iloc[Idx.weight]) # weight (g)\n self.volume = int(record.iloc[Idx.volume]) # volume (cm3)\n self.unit_price = record.iloc[Idx.unit_price] # price per cubic centimeter = price/volume (cent/cm3)\n\n def __str__(self):\n return \"Product ID: {} - $={}, Weight={}, Volume={}, Unit$={}\".\\\n format(self.p_id, self.value, self.weight, self.volume, self.unit_price)\n\n def __lt__(self, other):\n # define a way to sort products by \"unit price\" first and break tie by \"volume\" and then \"weight\"\n\n if self.unit_price < other.unit_price:\n # higher unit price wins\n return True\n\n if self.unit_price == other.unit_price:\n if self.volume < other.volume:\n # with same unit price, bigger volume wins\n return True\n elif self.volume == other.volume and self.weight > other.weight:\n # with same unit price and volume, lighter weight wins\n return True\n\n # if all 3 values are the same, the order 
of the 2 products doesn't matter for our use case\n # default self < other to 'False'\n return False\n\n\nclass BestState:\n def __init__(self, space, min_volume):\n self.space = 0 if space == 0 else space + min_volume - 1\n self.id_sum = 0\n self.value = 0\n self.weight = 0\n\n def __lt__(self, other):\n # compare 2 BestStates by value and break tie by weight\n if self.value < other.value:\n # higher value wins\n return True\n if self.value == other.value and self.weight > other.weight:\n # with same value, lighter weight wins\n return True\n\n # if 2 BestState have the same value and weight\n # default self < other to 'False'\n return False\n\n def __set_state(self, state):\n self.id_sum = state.id_sum\n self.value = state.value\n self.weight = state.weight\n\n def update_state(self, state1, state2, product):\n if product.volume > self.space:\n self.__set_state(state1)\n elif state2.value + product.value > state1.value or \\\n (state2.value + product.value == state1.value and state2.weight + product.weight < state1.weight):\n self.id_sum = state2.id_sum + product.p_id\n self.value = state2.value + product.value\n self.weight = state2.weight + product.weight\n else:\n self.__set_state(state1)\n\n\nclass Basket:\n def __init__(self, space, name=\"\"):\n self.name = name\n self.items = []\n self.num_items = 0 # number of items in the basket\n self.capacity = space # the total capacity of the basket (cm3)\n self.volume = 0 # the total volume of products in the basket (cm3)\n self.value = 0 # the total value of all products in the basket (cent)\n self.weight = 0 # the total weight of all products in the basket (gram)\n self.id_sum = 0 # the sum of product ID of all products in the basket\n\n def add_a_product(self, product):\n if self.capacity - self.volume >= product.volume:\n self.items.append(product)\n self.num_items += 1\n self.volume += product.volume\n self.value += product.value\n self.weight += product.weight\n self.id_sum += product.p_id\n else:\n print(\"Failed to add product ID = {}, volume = {} into the basket - remaining space = {}\".\n format(product.p_id, product.volume, self.capacity - self.volume))\n\n def __str__(self):\n return \"{} - total {} products, total volume = {}, total value={}, \" \\\n \"total weight={}, ID sum={}, space left={}\".\\\n format(self.name, self.num_items, self.volume, self.value,\n self.weight, self.id_sum, self.capacity - self.volume)\n\n def print_content(self):\n # to print detailed information of the basket, including all products in the basket\n print(\"{} - total {} products, total volume = {}, total value={}, \"\n \"total weight={}, ID sum={}, space left={}\".\n format(self.name, self.num_items, self.volume, self.value,\n self.weight, self.id_sum, self.capacity - self.volume))\n\n for i in range(self.num_items):\n print(\"{} - {}\".format(i + 1, self.items[i]))\n\n\ndef process_input(csv_file_name):\n\n # input csv file format (no header):\n # | 0 | 1 | 2 | 3 | 4 | 5 |\n # | product ID | price | length | width | height | weight |\n # | / | cents | cm | cm | cm | g |\n # ---------------------------------------------------------\n inputs = pd.read_csv(csv_file_name, header=None, names=list(range(6)), nrows=num_rows) # DataFrame\n\n row, column = inputs.shape\n logging.info(\"'{}' - row = {}, column = {}\".format(csv_file_name, row, column))\n\n # calculate volume and unit prices for all the products\n volumes = []\n unit_prices = []\n\n for i in range(row):\n volumes.append(inputs.iloc[i, Idx.length] * inputs.iloc[i, Idx.width] * inputs.iloc[i, 
Idx.height])\n unit_prices.append(inputs.iloc[i, Idx.price] / volumes[i])\n\n min_volume = min(volumes)\n logging.info(\"min volume = {}\".format(min_volume))\n\n # add volume and unit price as new columns of inputs\n inputs[Idx.volume] = pd.Series(volumes, index=inputs.index)\n inputs[Idx.unit_price] = pd.Series(unit_prices, index=inputs.index)\n\n # updated format of inputs\n # | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 |\n # | product ID | price | length | width | height | weight | volume | unit price |\n # | / | cents | cm | cm | cm | g | cm3 | cent/cm3 |\n # -------------------------------------------------------------------------------\n\n products = [] # a list contains all candidate products (i.e. products that can fit into the tote individually)\n for i in range(row):\n # dimensions is a list containing length, width and height of a product, sorted in ascending order\n dimensions = sorted([inputs.iloc[i, Idx.length], inputs.iloc[i, Idx.width], inputs.iloc[i, Idx.height]])\n\n # Assume that the orientation of a product doesn't matter\n # (i.e. no request to place the product upright always).\n # For simplicity, only consider the potential rotation of the product be 90, 180, 270 degrees.\n if dimensions[0] > 30 or dimensions[1] > 35 or dimensions[2] > 45:\n # the product doesn't fit into the tote\n continue\n\n # the product can fit into the tote, add it to the candidate list\n products.append(Product(inputs.iloc[i]))\n\n logging.info(\"total no. of candidate products = {}\".format(len(products)))\n\n products.sort(reverse=True) # sort products by unit price in descending order\n\n return products, min_volume\n\n\ndef write_to_csv(products, num_lines):\n # utility function to output candidate products to csv\n # num_lines: the number of products to write to csv\n\n with open('candidate_products.csv', 'w') as csv_file:\n csv_writer = csv.writer(csv_file,)\n csv_writer.writerow([\"index\", \"product id\", \"value\", \"volume\", \"weight\", \"unit price\"])\n for i in range(num_lines + 1):\n csv_writer.writerow([i, products[i].p_id, products[i].value,\n products[i].volume, products[i].weight, products[i].unit_price])\n\n\ndef main():\n products, min_volume = process_input(\"./products.csv\")\n\n max_num = tote_volume//min_volume\n logging.info(\"max number of products in the tote = {}\".format(max_num))\n\n # products are already sorted according to unit price, volume and weight\n # since there are only maximum of max_num products that can fit into the tote,\n # search the top candidates (e.g. 
choose 3 times of the max_num) instead of the complete list\n products = products[0: max_num * 3] # to search the whole list, skip this line\n\n # write_to_csv(products, len(products)-1)\n\n # originally: need a table of size (len(products) + 1) x (tote_volume - 1) to save the BestStates\n # optimization: (saving in both computation time and space)\n # reduce the table size by (min_volume - 1) columns, since nothing can fit if the space is less than min_volume\n # finally: table size = (len(products) + 1) x (tote_volume - min_volume + 2)\n table = [[BestState(i, min_volume) for i in range(tote_volume - min_volume + 2)] for j in range(len(products) + 1)]\n\n for i in range(1, len(table)):\n for j in range(1, tote_volume - min_volume + 2):\n x = j - products[i-1].volume\n table[i][j].update_state(table[i-1][j], table[i-1][max(0, x)], products[i-1])\n\n final = table[len(products)][tote_volume - min_volume + 1]\n\n # the sum of IDs is already obtained in the \"final\" BestState\n logging.info(\"best total value = {}, weight = {}, ID sum = {}\".\n format(final.value, final.weight, final.id_sum))\n\n # to obtain details of products in the tote\n tote = Basket(tote_volume, \"TOTE\")\n\n j = tote_volume - min_volume + 1\n for i in range(len(products), 0, -1):\n if table[i][j].value != table[i-1][j].value:\n tote.add_a_product(products[i-1])\n x = j - products[i-1].volume\n j = max(0, x)\n\n print(tote)\n # tote.print_content() # to print details of content in the tote\n\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.6838741302490234, "alphanum_fraction": 0.7205014824867249, "avg_line_length": 55.5, "blob_id": "a3e31b50b32ce918dc6bc9ea2ca6dbb5196fa653", "content_id": "400224c86cd83430e82c7955fbbfc8a71f531317", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4076, "license_type": "no_license", "max_line_length": 681, "num_lines": 72, "path": "/README.md", "repo_name": "zhongh3/redmart_puzzles", "src_encoding": "UTF-8", "text": "# Attempt to Solve Coding Puzzles from RedMart\n\n### Note:\nThe code is written and tested in **Python 3.7** only.\n\n## I. Skiing in Singapore (http://geeks.redmart.com/2015/01/07/skiing-in-singapore-a-coding-diversion/):\n\n### 1.1 Description:\nIn digital form the map looks like the number grid below.\n\n```\n4 4 \n4 8 7 3\n2 5 9 3 \n6 3 2 5 \n4 4 1 6\n```\n\nThe first line (4 4) indicates that this is a **4x4** map. Each number represents the elevation of that area of the mountain. From each area (i.e. box) in the grid you can go north, south, east, west - but only if the elevation of the area you are going into is less than the one you are in. I.e. you can only ski downhill. You can start anywhere on the map and you are looking for a starting point with the longest possible path down as measured by the number of boxes you visit. And if there are several paths down of the same length, you want to take the one with the steepest vertical drop, i.e. the largest difference between your starting elevation and your ending elevation.\n\nOn this particular map the longest path down is of length=5 and it’s highlighted in bold below: **9-5-3-2-1**.\n\nThere is another path that is also length five: **8-5-3-2-1**. 
However the tie is broken by the first path being steeper, dropping from 9 to 1, a drop of 8, rather than just 8 to 1, a drop of 7.\n\nYour challenge is to write a program in your favorite programming language to find the longest (and then steepest) path on this map specified in the format above. It’s 1000x1000 in size, and all the numbers on it are between 0 and 1500.\n\n### 1.2 Instructions:\n**1.2.1** The solution is in **ski.py**.\n\n**1.2.2** The default map is from _./map.txt_. To change the input file, please replace the _./map.txt_ or change the source code in _main()_:\n```python\nski_map, row, column = prepare_map(\"map.txt\")\n```\n**1.2.3.** As the challenge is to find the longest (and then steepest) path on this map, the solution only provides the length of the path and its steepness (i.e. the drop from the starting area to the ending area). It didn't record the entire path. The results are printed as (take the example from the **Description**)\n```\nResults: max length = 5, drop = 8\n```\n**1.2.4.** To print debugging logs, simply change logging level from **logging.INFO** to **logging.DEBUG**:\n```python\nlogging.basicConfig(level=logging.INFO, format='%(levelname)s - %(funcName)s - %(lineno)d - %(message)s')\n```\n\n\n## II. 1,000,000th Customer Prize (http://geeks.redmart.com/2015/10/26/1000000th-customer-prize-another-programming-challenge/):\n\n### 2.1 Description:\nGiven 1 tote and a list of products, the goal is to maximize the dollar value of the products in the tote. Here are the rules:\n\n**2.1.1.** The tote’s usable space is **45** centimeters long, **30** wide and **35** high.\n\n**2.1.2.** Everything you take, together, must completely fit into a tote.\n##### You can assume that if the products fit into the tote both individually and together by total volume, that you'll be able to find a way to pack them in.\n\n**2.1.3.** A lighter tote is better, as long as you don’t sacrifice any dollar value.\n\n**2.1.4.** The input file contains 20,000 products, one per line. Each line has the following fields separated by a comma:\n```\nproduct ID, price (cent), length (cm), width (cm), height (cm), weight (g)\n```\n**2.1.5.** You can only take 1 of any product. \n\n**2.1.6.** Find the sum of product IDs of all the products you take. \n \n### 2.2 Instructions:\n**2.2.1.** The solution is in **prize.py**.\n\n**2.2.2.** Even though the challenge only asks for the sum of product IDs, the implementation also includes the algorithm to find the details of all the products in the tote.\n\n**2.2.3.** For optimization, all the candidate products (i.e. products that can fit into the tote individually) are sorted first. Only the top candidates (3 x max no. of products that can fit into the tote) are processed further. 
To process the whole list or change the number of top candidates, just edit the line: \n```python\nproducts = products[0: max_num * 3]\n```\n" }, { "alpha_fraction": 0.5707424879074097, "alphanum_fraction": 0.5830156803131104, "avg_line_length": 41.45307540893555, "blob_id": "c410bdcac2913a370df02825244d9bf874339179", "content_id": "f711a4ff0cc6bfb627cdb9ffeb02441f676b0c2c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13120, "license_type": "no_license", "max_line_length": 115, "num_lines": 309, "path": "/prize_old.py", "repo_name": "zhongh3/redmart_puzzles", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport csv\nfrom enum import IntEnum\n\nimport logging\n# change logging level from INFO to DEBUG to print debugging logs\nlogging.basicConfig(level=logging.INFO, format='%(levelname)s - %(funcName)s - %(lineno)d - %(message)s')\n\n\nclass Idx(IntEnum):\n p_id = 0\n price = 1\n length = 2\n width = 3\n height = 4\n weight = 5\n volume = 6\n unit_price = 7\n\n\n# The tote’s usable space is 45 centimeters long, 30 wide and 35 high\ntote_volume = 45 * 30 * 35 # 47250 cm3\n\n# To control the number of rows to read from the input file (for debugging use)\nnum_rows = None # set to None to read the whole file\n\n\nclass Product:\n def __init__(self, record=None):\n self.p_id = int(record.iloc[Idx.p_id]) # product id\n self.price = int(record.iloc[Idx.price]) # price (cent)\n self.length = int(record.iloc[Idx.length]) # length (cm) - it's ok to discard this info\n self.width = int(record.iloc[Idx.width]) # width (cm) - it's ok to discard this info\n self.height = int(record.iloc[Idx.height]) # height (cm) - it's ok to discard this info\n self.weight = int(record.iloc[Idx.weight]) # weight (g)\n self.volume = int(record.iloc[Idx.volume]) # volume (cm3)\n self.unit_price = record.iloc[Idx.unit_price] # price per cubic centimeter = price/volume (cent/cm3)\n\n def __str__(self):\n return \"Product ID: {} - $={}, L={}, W={}, H={}, Weight={}, V={}, Unit$ = {}\".\\\n format(self.p_id, self.price, self.length, self.width,\n self.height, self.weight, self.volume, self.unit_price)\n\n def __lt__(self, other):\n # define a way to sort products by \"unit price\" first and break tie by \"volume\" and then \"weight\"\n\n if self.unit_price < other.unit_price:\n # higher unit price wins\n return True\n\n if self.unit_price == other.unit_price:\n if self.volume < other.volume:\n # with same unit price, bigger volume is preferred\n return True\n elif self.volume == other.volume and self.weight > other.weight:\n # with same unit price and volume, lighter weight is preferred\n return True\n\n # if all 3 values are the same, the order of the 2 products doesn't matter for our use case\n # print the log just for information\n # default self < other to 'False' in this case\n if self.unit_price == other.unit_price and self.volume == other.volume and self.weight == other.weight:\n logging.info(\"Product ID {} and ID {} share same unit price, volume and weight\".\n format(self.p_id, other.p_id))\n\n return False\n\n\nclass Basket:\n # a basket is a collection of products who can fit into the tote all together\n def __init__(self, first, first_idx, volume=tote_volume):\n self.b_id = first_idx # use the index of the first product as the ID of the basket\n self.volume = volume # total volume of the basket (cm3)\n self.items = [first] # first is the 1st product added into the basket\n self.num_items = len(self.items) # number of items in the basket\n 
self.space = self.volume - first.volume # remaining space in the basket (cm3)\n self.value = first.price # the total value of all products in the basket\n self.weight = first.weight # the total weight of all products in the basket\n self.id_sum = first.p_id # the sum of product ID of all products in the basket\n\n def add_a_product(self, new_product):\n if self.space >= new_product.volume: # only add the new product if it fits into the basket\n self.items.append(new_product)\n self.num_items += 1\n self.space -= new_product.volume\n self.value += new_product.price\n self.weight += new_product.weight\n self.id_sum += new_product.p_id\n logging.debug(\"Added new product ID={} into the basket ID={}\".format(new_product.p_id, self.b_id))\n return True\n\n logging.debug(\"Failed to add new product ID={} into the basket ID={}\".format(new_product.p_id, self.b_id))\n return False\n\n def add_a_pair(self, products, pair_idx):\n if pair_idx[0] == pair_idx[1]:\n self.add_a_product(products[pair_idx[0]])\n else:\n self.add_a_product(products[pair_idx[0]])\n self.add_a_product(products[pair_idx[1]])\n\n def remove_last_product(self):\n if self.num_items >= 0:\n product = self.items.pop()\n self.num_items -= 1\n self.space += product.volume\n self.value -= product.price\n self.weight -= product.weight\n self.id_sum -= product.p_id\n logging.debug(\"Removed product ID={} from basket ID={}\".format(product.p_id, self.b_id))\n return True\n\n logging.debug(\"The basket ID={} is empty.\".format(self.b_id))\n return False\n\n def remove_last_pair(self):\n self.remove_last_product()\n self.remove_last_product()\n\n def __lt__(self, other):\n if self.value < other.value:\n # higher total value wins\n return True\n if self.value == other.value:\n if self.weight > other.weight:\n # with same total value, lighter weight is preferred\n return True\n elif self.weight == other.weight:\n raise Exception(\"Found Basket IDs: {} and {} with same values and weight.\".\n format(self.b_id, other.b_id))\n return False\n\n def __str__(self):\n return \"Basket ID: {} - total {} products, space left={}, total value={}, total weight={}, ID sum={}\".\\\n format(self.b_id, self.num_items, self.space, self.value, self.weight, self.id_sum)\n\n def print_content(self):\n # to print detailed information of the basket, including all products in the basket\n print(\"Basket ID: {} - total {} products, space left={}, total value={}, total weight={}, ID sum={}\".\n format(self.b_id, self.num_items, self.space, self.value, self.weight, self.id_sum))\n for i in range(self.num_items):\n print(\"{} - {}\".format(i + 1, self.items[i]))\n\n def fill_a_basket(self, products, start_idx, min_volume):\n # trying to add products into the basket\n # start the search from the product at index 'start_idx'\n\n idx_1 = -1 # keep track of the 1st index to failed\n idx_2 = -1 # keep track of the 1st index that succeeded after the 1st failure\n\n for i in range(start_idx, len(products)):\n if self.space < min_volume:\n print(\"Basket ID: {} is full\".format(self.b_id))\n print(self)\n return [idx_1, idx_2]\n\n if not self.add_a_product(products[i]): # failed to add current product into the basket\n idx_1 = i\n break\n\n if idx_1 > -1: # the 1st failure has happened\n for i in range(idx_1 + 1, len(products)):\n if self.space < min_volume:\n print(\"Basket ID: {} is full\".format(self.b_id))\n print(self)\n return [idx_1, idx_2]\n\n if self.add_a_product(products[i]): # this is the 1st success after the 1st failure\n idx_2 = i\n break\n\n return [idx_1, 
idx_2]\n\n\ndef process_input(csv_file_name):\n\n # input csv file format (no header):\n # | 0 | 1 | 2 | 3 | 4 | 5 |\n # | product ID | price | length | width | height | weight |\n # | / | cents | cm | cm | cm | g |\n # ---------------------------------------------------------\n inputs = pd.read_csv(csv_file_name, header=None, names=list(range(6)), nrows=num_rows) # DataFrame\n\n row, column = inputs.shape\n logging.info(\"'{}' - row = {}, column = {}\".format(csv_file_name, row, column))\n\n # calculate volume and unit prices for all the products\n volumes = []\n unit_prices = []\n for i in range(row):\n volumes.append(inputs.iloc[i, Idx.length] * inputs.iloc[i, Idx.width] * inputs.iloc[i, Idx.height])\n unit_prices.append(inputs.iloc[i, Idx.price]/volumes[i])\n\n min_volume = min(volumes)\n logging.info(\"min volume = {}\".format(min_volume))\n\n # add volume and unit prices as 2 new columns of inputs\n inputs[Idx.volume] = pd.Series(volumes, index=inputs.index)\n inputs[Idx.unit_price] = pd.Series(unit_prices, index=inputs.index)\n\n # updated format of inputs\n # | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 |\n # | product ID | price | length | width | height | weight | volume | unit price |\n # | / | cents | cm | cm | cm | g | cm3 | cent/cm3 |\n # -------------------------------------------------------------------------------\n\n products = [] # a list contains all candidate products (i.e. products that can fit into the tote individually)\n for i in range(row):\n # dimensions is a list containing length, width and height of a product, sorted in ascending order\n dimensions = sorted([inputs.iloc[i, Idx.length], inputs.iloc[i, Idx.width], inputs.iloc[i, Idx.height]])\n\n # Assume that the orientation of a product doesn't matter\n # (i.e. no request to place the product upright always).\n # For simplicity, only consider the potential rotation of the product be 90, 180, 270 degrees.\n if dimensions[0] > 30 or dimensions[1] > 35 or dimensions[2] > 45:\n # the product doesn't fit into the tote\n continue\n\n products.append(Product(inputs.iloc[i]))\n\n logging.info(\"total no. 
of candidate products = {}\".format(len(products)))\n\n products.sort(reverse=True) # sort products by unit price in descending order\n\n return products, min_volume\n\n\ndef write_to_csv(products, num_lines):\n # utility function to output sorted candidate products to csv\n # num_lines: the number of products to write to csv\n\n with open('sorted_candidate_products.csv', 'w') as csv_file:\n csv_writer = csv.writer(csv_file,)\n csv_writer.writerow([\"index\", \"product id\", \"value\", \"unit price\", \"volume\", \"weight\"])\n for i in range(num_lines + 1):\n csv_writer.writerow([i, products[i].p_id, products[i].price, products[i].unit_price,\n products[i].volume, products[i].weight])\n\n\ndef find_the_best_pair(products, start_idx, end_idx, max_value, space):\n # TODO: incomplete solution\n # it's possible for the better combination to be a single product, a pair or even more products\n # this solution here only looks for a single product or a pair, that's why it's incomplete\n\n pair = []\n for i in range(start_idx, end_idx+1):\n if products[i].volume > space:\n continue\n if products[i].price > max_value: # a single product is better than the pair\n max_value = products[i].price\n pair = [i, i]\n return max_value, pair\n\n space_left = space - products[i].volume\n for j in range(i+1, end_idx+1):\n if products[j].volume <= space_left:\n total_value = products[i].price + products[j].price\n if total_value > max_value: # a better pair is found\n max_value = total_value\n pair = [i, j]\n\n return max_value, pair\n\n\ndef main():\n print(\"Working in progress......\")\n products, min_volume = process_input(\"./products.csv\")\n\n # write_to_csv(products, 54)\n\n basket = Basket(products[0], 0)\n\n pair_idx = [0, 0]\n\n while basket.space >= min_volume:\n # pair_idx[0]: the 1st index to failed\n # pair_idx[1]: the 1st index that succeeded after the 1st failure\n pair_idx = basket.fill_a_basket(products, pair_idx[1]+1, min_volume)\n\n logging.info(\"1st index to fail = {}, 1st index to succeed after 1st failure = {}\".\n format(pair_idx[0], pair_idx[1]))\n\n if pair_idx[0] == -1 or pair_idx[1] == -1:\n break\n\n pair_value = products[pair_idx[0]-1].price+products[pair_idx[1]].price\n\n logging.info(\"last pair_value = {}, pair = {}\".format(pair_value, [pair_idx[0]-1, pair_idx[1]]))\n\n basket.remove_last_pair()\n\n # pair_idx[0]: the 1st index of the best pair\n # pair_idx[1]: the 2nd index of the best pair\n pair_value, pair_idx = find_the_best_pair(products, pair_idx[0]-1, pair_idx[1], pair_value, basket.space)\n\n logging.info(\"best pair_value = {}, pair = {}\".format(pair_value, pair_idx))\n\n basket.add_a_pair(products, pair_idx)\n # continue the search after the last product added into the basket, i.e. pair_idx[1]+1\n\n # basket.print_content()\n print(basket)\n print(\"Sum of Product IDs = {}\".format(basket.id_sum))\n\n\nif __name__ == \"__main__\":\n main()\n\n# Basket ID: 0 - total 23 products, space left=50, total value=41298, total weight=32077, ID sum=450166\n" } ]
4
CHAZICLE/MuseumGuard
https://github.com/CHAZICLE/MuseumGuard
4b581bee6bfacbb20b326649388f27b6a9d0b18e
c923bbc10c4898b2acbdaafbf07c5fb35089a094
7c54c995c386b773259455301abdfd74b95b80d7
refs/heads/master
2020-12-03T01:49:44.917633
2017-06-30T09:30:58
2017-06-30T09:30:58
95,872,536
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6909827589988708, "alphanum_fraction": 0.6919959187507629, "avg_line_length": 23.073171615600586, "blob_id": "f41e3c5a11e9d5fc144672204ec02470cad0b7dd", "content_id": "6a3ab5a50753766a0118c8e24fb755acb83b2c0f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 987, "license_type": "no_license", "max_line_length": 52, "num_lines": 41, "path": "/src/cpp/ai/path/PathFinder.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_PATH_PATHFINDER_HPP_\n#define FYP_AI_PATH_PATHFINDER_HPP_\n\n#include \"PathCommon.h\"\n#include <set>\n#include <map>\n#include <mutex>\n#include <unordered_set>\n\nnamespace ai {\n\tnamespace path {\n\t\tstruct StoredPathNode {\n\t\t\tstruct PathNodeLink *parentLink;\n\t\t\tstruct StoredPathNode *parent;\n\t\t\tstruct PathNode *node;\n\t\t\tfloat f,g,h;\n\t\t};\n\t\tclass PathFinder {\n\t\t\tprivate:\n\t\t\t\tStoredPathNode *startPathNode,*endPathNode,*c;\n\t\t\tpublic:\n\t\t\t\tstd::unordered_set<glm::vec3> dangerPoints;\n\t\t\t\tfloat dangerRange,dangerCost;\n\t\t\t\tint iterations;\n\t\t\t\tstd::map<int, StoredPathNode *> storedPathNodes;\n\t\t\t\tstd::set<StoredPathNode *> openSet;\n\t\t\t\tstd::set<StoredPathNode *> closedSet;\n\t\t\t\tfloat calculateDangerOffset(PathNode *pathNode);\n\t\t\t\tPathFinder();\n\t\t\t\t~PathFinder();\n\t\t\t\tvoid start(PathNode *a, PathNode *b);\n\t\t\t\tbool tick(int i);\n\t\t\t\tstd::vector<int> getPath();\n\t\t\t\tStoredPathNode *getStoredNode(PathNode *node);\n\t\t\t\tPathNode *getCurrentNode();\n\t\t\t\tbool done,sucess;\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.7464115023612976, "alphanum_fraction": 0.7464115023612976, "avg_line_length": 13.928571701049805, "blob_id": "5faedfd064c3730b18105e547819d9d94cd63f18", "content_id": "24d3617142836c3b9e6e1a16e62ddbbaddcc4d47", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 209, "license_type": "no_license", "max_line_length": 41, "num_lines": 14, "path": "/src/cpp/gui/screens/GraphicsMenu.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_SCREENS_GRAPHICSMENU_HPP_\n#define FYP_GUI_SCREENS_GRAPHICSMENU_HPP_\n\nclass GraphicsMenu;\n\n// Include dependencies\n\nclass GraphicsMenu {\n\tpublic:\n\t\tGraphicsMenu();\n\t\t~GraphicsMenu();\n};\n\n#endif\n" }, { "alpha_fraction": 0.8052173852920532, "alphanum_fraction": 0.8052173852920532, "avg_line_length": 37.33333206176758, "blob_id": "642dd2479e29504aa6fa0a38a0d3bf331d042575", "content_id": "c15dd58e060e5fef8d7586c199315229a365c31e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1150, "license_type": "no_license", "max_line_length": 128, "num_lines": 30, "path": "/src/cpp/gui/WindowScreenManager.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_WINDOWSCREENMANAGER_HPP_\n#define FYP_GUI_WINDOWSCREENMANAGER_HPP_\n\nstruct GLFWwindow;\n\n#include \"ScreenManager.hpp\"\n\nclass WindowScreenManager : public ScreenManager {\n\tpublic:\n\t\tGLFWwindow *window;\n\t\tint lastWindowWidthPx,lastWindowHeightPx,windowWidthPx,windowHeightPx,monitorWidthPx,monitorHeightPx,modeWidthPx,modeHeightPx;\n\t\tdouble scale;\n\t\tdouble lastX,lastY;\n\t\tdouble supportedCursorLastX,supportedCursorLastY,unsupportedCursorLastX,unsupportedCursorLastY;\n\t\tbool 
supportedCursorInit,unsupportedCursorInit,skipNextEvent,didSupportCursor;\n\tpublic:\n\t\tWindowScreenManager();\n\t\tvirtual ~WindowScreenManager();\n\t\tstatic WindowScreenManager *eventHandler;\n\t\tvirtual void close();\n\t\tstatic void onError(int error, const char *msg);\n\t\tstatic void onKeyEvent(GLFWwindow *window, int key, int scancode, int action, int mode);\n\t\tstatic void onCursorPosEvent(GLFWwindow *window, double x, double y);\n\t\tstatic void onMouseButtonEvent(GLFWwindow* window, int button, int action, int mods);\n\t\tstatic void onScrollEvent(GLFWwindow* window, double dx, double dy);\n\t\tvirtual void onSurfaceScreenChanged(Screen *screen);\n\t\tvoid run();\n};\n\n#endif\n" }, { "alpha_fraction": 0.7464115023612976, "alphanum_fraction": 0.7464115023612976, "avg_line_length": 13.928571701049805, "blob_id": "5e4f30409a6aa59567743a5eff5fc62ac64bceb7", "content_id": "6bd40edfde46766b334bf4e9edf213ad80585ea0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 209, "license_type": "no_license", "max_line_length": 41, "num_lines": 14, "path": "/src/cpp/gui/screens/ControlsMenu.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_SCREENS_CONTROLSMENU_HPP_\n#define FYP_GUI_SCREENS_CONTROLSMENU_HPP_\n\nclass ControlsMenu;\n\n// Include dependencies\n\nclass ControlsMenu {\n\tpublic:\n\t\tControlsMenu();\n\t\t~ControlsMenu();\n};\n\n#endif\n" }, { "alpha_fraction": 0.7196632027626038, "alphanum_fraction": 0.7241208553314209, "avg_line_length": 43.373626708984375, "blob_id": "e7fb5c693f1b72420fb0170b71946894a37068bd", "content_id": "ff183c7924378b160adb5110aaf638887f1482de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4038, "license_type": "no_license", "max_line_length": 201, "num_lines": 91, "path": "/src/cpp/world/collisions/StaticMesh.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_WORLD_COLLISIONS_STATICMESH_HPP_\n#define FYP_WORLD_COLLISIONS_STATICMESH_HPP_\n\nnamespace world {\n\tnamespace collisions {\n\t\tclass StaticMesh;\n\t\ttypedef struct StaticMeshTriangle StaticMeshTriangle;\n//typedef struct StaticMeshCell StaticMeshCell;\n\t}\n}\n\n#include <vector>\n#include \"util/Boundaries.hpp\"\n#include \"render/StaticModel.hpp\"\n\n//debug\n#include \"render/RenderManager.hpp\"\n#include \"render/BasicShapes.hpp\"\n\nnamespace world {\n\tnamespace collisions {\n\t\tstruct StaticMeshCell {\n\t\t\tutil::Boundaries::AABB *bounds;\n\t\t\tint level;\n\t\t\tbool leaf;\n\t\t\tstd::vector<struct StaticMeshCell> children;\n\t\t\tstd::vector<struct StaticMeshTriangle> triangles;\n\t\t};\n\t\tstruct StaticMeshTriangle {\n\t\t\trender::StaticModel *model;\n\t\t\trender::StaticModelObject *modelObject;\n\t\t\tint triangleIndex;\n\t\t};\n\t\tclass StaticMesh {\n\t\t\tprivate:\n\t\t\t\tStaticMeshCell rootCell;\n\t\t\tprotected:\n\n\t\t\t\t// Adds a triangle to a cell and its children if needed\n\t\t\t\tvoid addTriangleToCell(StaticMeshTriangle &triangle, StaticMeshCell &cell, int cellLevel);\n\t\t\t\t// Adds a triangle to a cells children\n\t\t\t\tvoid addTriangleToCellChildren(StaticMeshTriangle &triangle, StaticMeshCell &parent, int childrenLevel);\n\n\t\t\t\t// Divides a cell into pieces\n\t\t\t\tvoid subdivideCell(StaticMeshCell &parent, int childrenLevel);\n\t\t\t\t// Adds a child cell to parent cell\n\t\t\t\tvoid addChildToCell(StaticMeshCell &parent, int childIndex, util::Boundaries::AABB 
*boxBounds);\n\n\t\t\t\t///////////////////////////////////\n\t\t\t\t// Collision Detection Functions //\n\t\t\t\t///////////////////////////////////\n\t\t\t\t\n\t\t\t\t// Raycasting functions\n\t\t\t\tStaticMeshTriangle *rayCastCellChildren(util::Boundaries::Raycast &raycast, StaticMeshCell &cell, util::Boundaries::RaycastResult &result);\n\t\t\t\tStaticMeshTriangle *rayCastCellTriangles(util::Boundaries::Raycast &raycast, StaticMeshCell &cell, util::Boundaries::RaycastResult &result);\n\t\t\tpublic:\n\t\t\t\tStaticMesh();\n\t\t\t\t~StaticMesh();\n\t\t\t\t//## Misc utils\n\t\t\t\tglm::vec3 getTriangleNormal(const StaticMeshTriangle &triangle);\n\t\t\t\t\n\t\t\t\t// Adds a static model to the static mesh collision checker\n\t\t\t\tvoid addStaticModel(render::StaticModel *model);\n\n\t\t\t\t// debug\n\t\t\t\tvoid render(render::RenderManager &rManager, util::Boundaries::AABB &selectionBox);\n\t\t\t\tvoid renderCellChildren(render::RenderManager &rManager, util::Boundaries::AABB &selectionBox, StaticMeshCell &cell);\n\t\t\t\tvoid render(render::RenderManager &rManager, util::Boundaries::Raycast &raycast);\n\t\t\t\tStaticMeshTriangle *renderCellChildren(render::RenderManager &rManager, util::Boundaries::Raycast &raycast, StaticMeshCell &cell, util::Boundaries::RaycastResult &result);\n\n\t\t\t\t///////////////////////////////////\n\t\t\t\t// Collision Detection Functions //\n\t\t\t\t///////////////////////////////////\n\n\t\t\t\t// Raycasting functions\n\t\t\t\tutil::Boundaries::RaycastResult rayCast(util::Boundaries::Raycast &raycast);\n\n\t\t\t\t// Intersection functions\n\t\t\t\tbool collisionResponse(render::RenderManager &rManager, const util::Boundaries::AABB &aabb, glm::vec3 *velocity);\n\t\t\t\tbool collisionResponseCellChildren(render::RenderManager &rManager, const util::Boundaries::AABB &aabb, const StaticMeshCell &parent, glm::vec3 *velocity);\n\n\t\t\t\tbool collisionResponse(const util::Boundaries::Sphere &sphere, float *interpolation, glm::vec3 *velocity, std::vector<glm::vec3> *normals);\n\t\t\t\tbool collisionResponseCellChildren(const util::Boundaries::Sphere &sphere, const StaticMeshCell &parent, float *interpolation, glm::vec3 *velocity, std::vector<glm::vec3> *normals);\n\t\t\t\tbool collisionResponseCellTriangles(const util::Boundaries::Sphere &sphere, const StaticMeshCell &cell, float *interpolation, glm::vec3 *velocity, std::vector<glm::vec3> *normals);\n\t\t\t\tbool collisionResponseCellTriangleEdge(const util::Boundaries::Sphere &sphere, float *interpolation, glm::vec3 *velocity, std::vector<glm::vec3> *normals, const glm::vec3 &V0, const glm::vec3 &V1);\n\t\t\t\tbool collisionResponseCellTriangleVertex(const util::Boundaries::Sphere &sphere, const glm::vec3 &P1, const glm::vec3 &V, float &enter, float &exit);\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.7679426074028015, "alphanum_fraction": 0.7703348994255066, "avg_line_length": 27.827587127685547, "blob_id": "0423048868beb469e78a22f11ca8c64fe314a98d", "content_id": "cfac3ae288c72cc4e19150f241c8c965b4e8614e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 836, "license_type": "no_license", "max_line_length": 86, "num_lines": 29, "path": "/src/cpp/render/BasicShapes.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_RENDER_BASICSHAPES_HPP_\n#define FYP_RENDER_BASICSHAPES_HPP_\n\n#include <glm/glm.hpp>\n#include \"util/gl.h\"\n\nnamespace render {\n\tclass BasicShapes {\n\t\tprivate:\n\t\t\tstatic GLuint 
unitMeshArrayID,\n\t\t\t\t\tunitMeshVertexPositionBufferID,\n\t\t\t\t\tunitSquareIndexBufferID,\n\t\t\t\t\tunitCubeIndexBufferID,\n\t\t\t\t\tlineVertexArrayID,\n\t\t\t\t\tlineVertexBufferID,\n\t\t\t\t\tunitCubeFrameIndexBufferID\n\t\t\t\t;\n\t\t\tstatic void bindUnitMesh(GLuint vertexPositionPointer);\n\t\tpublic:\n\t\t\tstatic void init();\n\t\t\tstatic void renderUnitSquare(GLuint vertexPositionPointer);\n\t\t\tstatic void renderUnitCube(GLuint vertexPositionPointer);\n\t\t\tstatic void renderUnitCubeFrame(GLuint vertexPositionPointer);\n\t\t\tstatic void drawLine(glm::vec3 start, glm::vec3 end, GLuint vertexPositionPointer);\n\t\t\tstatic void drawPoint(float size, GLuint vertexPositionPointer);\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 13.25, "blob_id": "e595ce09e46ed56886683daaf1d266e6a75b0a5a", "content_id": "420474ce19e2b7fca307e48b53d649fe87437d13", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 57, "license_type": "no_license", "max_line_length": 26, "num_lines": 4, "path": "/src/cpp/render/shaders/code/test.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//vertex\n#include \"common.h\"\n\n// this is the test.c file\n" }, { "alpha_fraction": 0.6991651058197021, "alphanum_fraction": 0.7165929079055786, "avg_line_length": 39.14468765258789, "blob_id": "e9c99b2a0f4fd09d62d6895d69a5e991e1417874", "content_id": "cbf048e125b1279d4686f09b2182090b40f2caa1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 21919, "license_type": "no_license", "max_line_length": 206, "num_lines": 546, "path": "/src/cpp/world/collisions/StaticMesh.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"tribox3.h\"\n#include \"render/StaticModel.hpp\"\n\n//debug\n#include \"render/shaders/ShaderProgram.hpp\"\n#include <iostream>\n\n#include \"StaticMesh.hpp\"\n\nusing namespace render;\nusing namespace world::collisions;\nusing namespace util::Boundaries;\n\n#define CELL_MAX_LEVEL 12\n#define CELL_MAX_TRIANGLES_PER_CELL 16\n\nStaticMesh::StaticMesh()\n{\n\t\n}\nvoid deleteCell(StaticMeshCell &cell)\n{\n\tfor(auto &c : cell.children)\n\t\tdeleteCell(c);\n\tdelete cell.bounds;\n}\nStaticMesh::~StaticMesh()\n{\n\t// Delete all the bounding boxes\n\tfor(auto &c : this->rootCell.children)\n\t\tdeleteCell(c);\n}\n// Adding a static model to the static mesh collision\nvoid StaticMesh::addStaticModel(render::StaticModel *model)\n{\n\tint objectCount = 0;\n\tStaticMeshTriangle triangle;\n\ttriangle.model = model;\n\tthis->rootCell.bounds = &model->getBounds();\n\tthis->rootCell.level = 0;\n\tthis->rootCell.leaf = true;\n\tthis->rootCell.children.clear();\n\tthis->rootCell.triangles.clear();\n\tfor(render::StaticModelObject *oj : model->objects)\n\t{\n\t\ttriangle.modelObject = oj;\n\t\tfor(int i=0;i<oj->numPrimitives;i++)\n\t\t{\n\t\t\t//std::cout << \"Begin Triangle\" << std::endl;\n\t\t\ttriangle.triangleIndex = i;\n\t\t\taddTriangleToCell(triangle, this->rootCell, 0);\n\t\t}\n\t\tobjectCount++;\n\t}\n}\nvoid StaticMesh::addTriangleToCell(StaticMeshTriangle &triangle, StaticMeshCell &cell, int cellLevel)\n{\n\tif(cell.leaf)\n\t{\n\t\tcell.triangles.push_back(triangle);\n\t\tif(cell.triangles.size()>CELL_MAX_TRIANGLES_PER_CELL && cell.level<=CELL_MAX_LEVEL)\n\t\t{\n\t\t\t//std::cout << \"\\t\" << cellLevel << \":S cellTriangleCount=\" << cellTriangleCount << 
std::endl;\n\t\t\t// Add triangles from this ex-leaf cell to child cells\n\t\t\tcell.leaf = false;\n\t\t\tsubdivideCell(cell, cellLevel);\n\t\t\tfor(auto tri : cell.triangles)\n\t\t\t{\n\t\t\t\taddTriangleToCellChildren(tri, cell, cellLevel+1);\n\t\t\t}\n\t\t\t// Non leaf nodes don't need triangles in them\n\t\t\tcell.triangles.clear();\n\t\t}\n\t}\n\telse\n\t{\n\t\t//std::cout << \"\\t\" << cellLevel << \":N cellTriangleCount=\" << cellTriangleCount << std::endl;\n\t\taddTriangleToCellChildren(triangle, cell, cellLevel+1);\n\t}\n\t//std::cout << \"\\t\" << cellLevel << \":T cellTriangleCount=\" << cell.triangles.size() << std::endl;\n}\nvoid StaticMesh::addTriangleToCellChildren(StaticMeshTriangle &triangle, StaticMeshCell &parent, int childrenLevel)\n{\n\tfor(StaticMeshCell &child : parent.children)\n\t{\n\t\t// Get the vertecies of the triangle\n\t\tGLfloat *vertexAPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+0]];\n\t\tGLfloat *vertexBPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+1]];\n\t\tGLfloat *vertexCPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+2]];\n\t\tif(triBoxOverlap(child.bounds->boxCenter, child.bounds->boxHalfSize, vertexAPosition, vertexBPosition, vertexCPosition))\n\t\t{\n\t\t\tthis->addTriangleToCell(triangle, child, childrenLevel);\n\t\t}\n\t}\n}\n// Cell subdivision functions\nvoid StaticMesh::subdivideCell(StaticMeshCell &parent, int childrenLevel)\n{\n\t//std::cout << \"subdivide: Level:\" << childrenLevel << std::endl;\n#define X 0\n#define Y 1\n#define Z 2\n\t// Determine which should be split\n\tbool splitX = parent.bounds->boxHalfSize[X]*2>parent.bounds->boxHalfSize[Y] || parent.bounds->boxHalfSize[X]*2>parent.bounds->boxHalfSize[Z];\n\tbool splitY = parent.bounds->boxHalfSize[Y]*2>parent.bounds->boxHalfSize[X] || parent.bounds->boxHalfSize[Y]*2>parent.bounds->boxHalfSize[Z];\n\tbool splitZ = parent.bounds->boxHalfSize[Z]*2>parent.bounds->boxHalfSize[X] || parent.bounds->boxHalfSize[Z]*2>parent.bounds->boxHalfSize[Y];\n\t\n\t// Calculate the sizes of the new cells\n\tfloat boxSplitHalfSize[] = {\n\t\tsplitX ? parent.bounds->boxHalfSize[X]/2 : parent.bounds->boxHalfSize[X],\n\t\tsplitY ? parent.bounds->boxHalfSize[Y]/2 : parent.bounds->boxHalfSize[Y],\n\t\tsplitZ ? parent.bounds->boxHalfSize[Z]/2 : parent.bounds->boxHalfSize[Z]\n\t};\n\t// Calculate the centers of all the lower set of cells\n\tfloat boxSplitCenter1[] = {\n\t\tsplitX ? parent.bounds->boxCenter[X]-parent.bounds->boxHalfSize[X]/2: parent.bounds->boxCenter[X],\n\t\tsplitY ? parent.bounds->boxCenter[Y]-parent.bounds->boxHalfSize[Y]/2: parent.bounds->boxCenter[Y],\n\t\tsplitZ ? 
parent.bounds->boxCenter[Z]-parent.bounds->boxHalfSize[Z]/2: parent.bounds->boxCenter[Z],\n\t};\n\t// Calculate the centers of all the upper set of cells\n\tfloat boxSplitCenter2[] = {\n\t\tboxSplitCenter1[X]+parent.bounds->boxHalfSize[X],\n\t\tboxSplitCenter1[Y]+parent.bounds->boxHalfSize[Y],\n\t\tboxSplitCenter1[Z]+parent.bounds->boxHalfSize[Z]\n\t};\n\t\n\t\t\t\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter1[X], boxSplitCenter1[Y], boxSplitCenter1[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitZ)\t\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter1[X], boxSplitCenter1[Y], boxSplitCenter2[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitY)\t\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter1[X], boxSplitCenter2[Y], boxSplitCenter1[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitY && splitZ)\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter1[X], boxSplitCenter2[Y], boxSplitCenter2[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitX)\t\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter2[X], boxSplitCenter1[Y], boxSplitCenter1[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitX && splitZ)\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter2[X], boxSplitCenter1[Y], boxSplitCenter2[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitX && splitY)\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter2[X], boxSplitCenter2[Y], boxSplitCenter1[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitX && splitY && splitZ)\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter2[X], boxSplitCenter2[Y], boxSplitCenter2[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n}\nvoid StaticMesh::addChildToCell(StaticMeshCell &parent, int childLevel, AABB *boxBounds)\n{\n\tStaticMeshCell cell;\n\tcell.bounds = boxBounds;\n\tcell.level = childLevel;\n\tcell.leaf = true;\n\tcell.children.clear();\n\tcell.triangles.clear();\n\tparent.children.push_back(cell);\n}\n\n\n//## Debug rendering functions\nvoid StaticMesh::render(render::RenderManager &rManager, AABB &selectionBox)\n{\n\tthis->renderCellChildren(rManager, selectionBox, this->rootCell);\n}\nvoid StaticMesh::renderCellChildren(render::RenderManager &rManager, AABB &selectionBox, StaticMeshCell &cell)\n{\n\tif(!selectionBox.checkIntersect(*cell.bounds))\n\t\treturn;\n\tcell.bounds->render(rManager, glm::vec4(0.f, 1.f, (float)cell.level/CELL_MAX_LEVEL, 1.0f), false);\n\tfor(StaticMeshCell &c : cell.children)\n\t{\n\t\tthis->renderCellChildren(rManager, selectionBox, c);\n\t}\n}\nvoid StaticMesh::render(render::RenderManager &rManager, Raycast &raycast)\n{\n\tRaycastResult result;\n\tresult.distance = raycast.maxDistance<=0 ? 
std::numeric_limits<float>::max() : raycast.maxDistance;\n\tStaticMeshTriangle *triangle = this->renderCellChildren(rManager, raycast, this->rootCell, result);\n\tif(triangle!=0)\n\t{\n\t\tglm::vec3 &V0 = *(glm::vec3 *)&triangle->model->dataBuffer[triangle->model->dataBufferStride*triangle->modelObject->indecies[triangle->triangleIndex*3+0]];\n\t\tglm::vec3 &V1 = *(glm::vec3 *)&triangle->model->dataBuffer[triangle->model->dataBufferStride*triangle->modelObject->indecies[triangle->triangleIndex*3+1]];\n\t\tglm::vec3 &V2 = *(glm::vec3 *)&triangle->model->dataBuffer[triangle->model->dataBufferStride*triangle->modelObject->indecies[triangle->triangleIndex*3+2]];\n\n\t\trender::shaders::ShaderProgram *shader = rManager.useShader(SHADER_solidColor);\n\t\tglUniform4f(shader->getShaderLocation(true, SHADER_solidColor_solidColor), 0.7f, 0.7f, 0.7f, 0.7f);\n\n\t\tBasicShapes::drawLine(V0, V1, shader->getShaderLocation(false, SHADERVAR_vertex_position));\n\t\tBasicShapes::drawLine(V1, V2, shader->getShaderLocation(false, SHADERVAR_vertex_position));\n\t\tBasicShapes::drawLine(V0, V2, shader->getShaderLocation(false, SHADERVAR_vertex_position));\n\t}\n}\nStaticMeshTriangle *StaticMesh::renderCellChildren(render::RenderManager &rManager, Raycast &raycast, StaticMeshCell &cell, RaycastResult &result)\n{\n\tcell.bounds->render(rManager, glm::vec4(0.f, 1.f, (float)cell.level/CELL_MAX_LEVEL, 1.0f), false);\n\tStaticMeshTriangle *resultTriangle = 0,*triangleTmp=0;\n\tfloat cellDistance = 0;\n\tfor(StaticMeshCell &cell : cell.children)\n\t{\n\t\tcellDistance = cell.bounds->rayCastDistance(raycast);\n\t\tif(cell.bounds->checkInside(raycast.origin) || (cellDistance!=-1 && cellDistance<result.distance))\n\t\t{\n\t\t\tif(cell.leaf)\n\t\t\t\ttriangleTmp = this->rayCastCellTriangles(raycast, cell, result);\n\t\t\telse\n\t\t\t\ttriangleTmp = this->renderCellChildren(rManager, raycast, cell, result);\n\t\t\tif(triangleTmp!=0)\n\t\t\t\tresultTriangle = triangleTmp;\n\t\t}\n\t}\n\treturn resultTriangle;\n}\n\n\n//##Raycasting functions\nRaycastResult StaticMesh::rayCast(Raycast &raycast)\n{\n\tRaycastResult result;\n\tresult.hit = false;\n\tresult.distance = raycast.maxDistance<=0 ? 
std::numeric_limits<float>::max() : raycast.maxDistance;\n\tthis->rayCastCellChildren(raycast, this->rootCell, result);\n\t//result.hit = result.distance>=0 && result.distance<raycast.maxDistance;\n\treturn result;\n}\nStaticMeshTriangle *StaticMesh::rayCastCellChildren(Raycast &raycast, StaticMeshCell &cell, RaycastResult &result)\n{\n\tStaticMeshTriangle *resultTriangle = 0,*triangleTmp=0;\n\tfloat cellDistance = 0;\n\tfor(StaticMeshCell &cell : cell.children)\n\t{\n\t\tcellDistance = cell.bounds->rayCastDistance(raycast);\n\t\tif(cell.bounds->checkInside(raycast.origin) || (cellDistance!=-1 && cellDistance<result.distance))\n\t\t{\n\t\t\tif(cell.leaf)\n\t\t\t\ttriangleTmp = this->rayCastCellTriangles(raycast, cell, result);\n\t\t\telse\n\t\t\t\ttriangleTmp = this->rayCastCellChildren(raycast, cell, result);\n\t\t\tif(triangleTmp!=0)\n\t\t\t\tresultTriangle = triangleTmp;\n\t\t}\n\t}\n\treturn resultTriangle;\n}\nStaticMeshTriangle *StaticMesh::rayCastCellTriangles(Raycast &raycast, StaticMeshCell &cell, RaycastResult &result)\n{\n\tStaticMeshTriangle *resultTriangle = 0;\n\tfor(StaticMeshTriangle &triangle : cell.triangles)\n\t{\n\t\tglm::vec3 &P0 = raycast.origin;\n\t\tglm::vec3 &P1 = raycast.direction;\n\t\tglm::vec3 &V0 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+0]];\n\t\tglm::vec3 &V1 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+1]];\n\t\tglm::vec3 &V2 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+2]];\n\t\tglm::vec3 u = V1-V0;\n\t\tglm::vec3 v = V2-V0;\n\t\tglm::vec3 n = glm::cross(u,v);\n\t\t// Determine where the ray is hitting the plane\n\t\tfloat rI_d = glm::dot(n, P1);\n\t\tif(rI_d==0)//Ray is parallel\n\t\t\tcontinue;\n\t\tfloat rI = glm::dot(n, V0-P0)/rI_d;\n\t\tif(result.distance<rI)//Ray is further away\n\t\t\tcontinue;\n\t\t// Is the projected point on the plane within the triangle?\n\t\tglm::vec3 w = (P0+P1*rI)-V0;\n\t\tfloat dotUV = glm::dot(u,v);\n\t\tfloat dotWU = glm::dot(w,u);\n\t\tfloat dotWV = glm::dot(w,v);\n\t\tfloat dotVV = glm::dot(v,v);\n\t\tfloat dotUU = glm::dot(u,u);\n\t\tfloat _I_d = dotUV*dotUV-dotUU*dotVV;\n\t\tfloat sI = (dotUV*dotWV-dotVV*dotWU)/_I_d;\n\t\tfloat tI = (dotUV*dotWU-dotUU*dotWV)/_I_d;\n\t\tif(sI>=0 && tI>=0 && sI+tI<=1)\n\t\t{\n\t\t\t//std::cout << \"SUCC rI=\" << rI << \"sI=\" << sI << \", tI=\" << tI << \", sI+tI=\" << (sI+tI) << std::endl;\n\t\t\tif(rI>0 && result.distance>rI)\n\t\t\t{\n\t\t\t\tglm::vec3 &N0 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+0]+triangle.model->dataBufferNormalsOffset];\n\t\t\t\tglm::vec3 &N1 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+1]+triangle.model->dataBufferNormalsOffset];\n\t\t\t\tglm::vec3 &N2 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+2]+triangle.model->dataBufferNormalsOffset];\n\t\t\t\tresult.hitNormal = N0*(1-(sI+tI))+N1*sI+N2*tI;\n\t\t\t\tresult.distance = rI;\n\t\t\t\tresult.hit = true;\n\t\t\t\t//result.hitNormal = n;\n\t\t\t\t//std::cout << \"Total:\" << (1-(sI+tI))+sI+tI << std::endl;\n\t\t\t\t//if(glm::dot(result.hitNormal, P1)>0)//Ensure facing towards the ray 
origin\n\t\t\t\t//\tresult.hitNormal = -result.hitNormal;\n\t\t\t}\n\t\t}\n\t}\n\treturn resultTriangle;\n}\nglm::vec3 StaticMesh::getTriangleNormal(const StaticMeshTriangle &triangle)\n{\n\tglm::vec3 &N0 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+0]+triangle.model->dataBufferNormalsOffset];\n\tglm::vec3 &N1 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+1]+triangle.model->dataBufferNormalsOffset];\n\tglm::vec3 &N2 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+2]+triangle.model->dataBufferNormalsOffset];\n\treturn glm::normalize((N0+N1+N2));\n}\nbool StaticMesh::collisionResponse(render::RenderManager &rManager, const AABB &aabb, glm::vec3 *velocity)\n{\n\t//TODO: Complete this\n\trManager.M = glm::mat4(1.0f);\n\trManager.markMDirty();\n\tif(!this->rootCell.bounds->checkIntersect(aabb))\n\t\treturn false;\n\t//rootCell.bounds->render(rManager, glm::vec4(0.f, 1.f, (float)rootCell.level/CELL_MAX_LEVEL, 1.0f), false);\n\treturn this->collisionResponseCellChildren(rManager, aabb, this->rootCell, velocity);\n}\nbool StaticMesh::collisionResponseCellChildren(render::RenderManager &rManager, const AABB &aabb, const StaticMeshCell &parent, glm::vec3 *velocity)\n{\n\t//TODO: Complete this\n\t//parent.bounds->render(rManager, glm::vec4(0.f, 1.f, (float)parent.level/CELL_MAX_LEVEL, 1.0f), false);\n\tbool intersection = false;\n\tfor(const StaticMeshCell &cell : parent.children)\n\t{\n\t\tif(cell.leaf)\n\t\t{\n\t\t\tfor(const StaticMeshTriangle &triangle : cell.triangles)\n\t\t\t{\n\t\t\t\tGLfloat *vertexAPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+0]];\n\t\t\t\tGLfloat *vertexBPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+1]];\n\t\t\t\tGLfloat *vertexCPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+2]];\n\t\t\t\tAABB aabb2 = aabb;\n\t\t\t\tif(triBoxOverlap(aabb2.boxCenter, aabb2.boxHalfSize, vertexAPosition, vertexBPosition, vertexCPosition))\n\t\t\t\t{\n\t\t\t\t\t//std::cout << \"TRIANGLE INTERSECT\" << std::endl;\n\t\t\t\t\tif(velocity==0)\n\t\t\t\t\t{\n\t\t\t\t\t\taabb2.render(rManager, glm::vec4(0.f, 1.f, 1.f, 1.0f), false);\n\t\t\t\t\t\treturn true;\n\t\t\t\t\t}\n\t\t\t\t\telse\n\t\t\t\t\t{\n\t\t\t\t\t\tglm::vec3 normal = getTriangleNormal(triangle);\n\t\t\t\t\t\tif(glm::dot(normal, *velocity)<0)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t*velocity -= (normal*glm::dot(*velocity, normal));\n\t\t\t\t\t\t\tintersection = true;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\telse\n\t\t{\n\t\t\tif(cell.bounds->checkIntersect(aabb) && this->collisionResponseCellChildren(rManager, aabb, cell, velocity))\n\t\t\t{\n\t\t\t\t//if(velocity==0)\n\t\t\t\t//\treturn true;//Don't need to bother to check everything\n\t\t\t\tintersection = true;\n\t\t\t}\n\t\t}\n\t}\n\treturn intersection;\n}\n#define COLLISION_RESPONSE_EPSILON 0.001f\nbool StaticMesh::collisionResponse(const Sphere &sphere, float *interpolation, glm::vec3 *velocity, std::vector<glm::vec3> *normals)\n{\n\tif(!this->rootCell.bounds->checkIntersect(sphere))\n\t\treturn false;\n\tSphere sp = sphere;\n\tsp.center += 
*velocity;\n\tif(!this->rootCell.bounds->checkIntersect(sp))\n\t\treturn false;\n\treturn this->collisionResponseCellChildren(sphere, this->rootCell, interpolation, velocity, normals);\n}\nextern glm::vec3 debug_point;\nextern glm::vec3 debug_point2;\nextern glm::vec3 debug_point3;\ninline bool getQuadraticRoots(const float a, const float b, const float c, float &t0, float &t1)\n{\n\tfloat desc = b*b-(4.f*a*c);\n\tif(desc<0.0f)\n\t\treturn false;\n\tfloat descSqrt = std::sqrt(desc);\n\tt0 = (-b-descSqrt)/(2.f*a);\n\tt1 = (-b+descSqrt)/(2.f*a);\n\tif(t0>t1)\n\t{\n\t\tfloat temp(t0); t0 = t1; t1 = temp;\n\t}\n\treturn (t1>=0.0f && t0<=1.0f);\n}\nbool StaticMesh::collisionResponseCellChildren(const Sphere &sphere, const StaticMeshCell &parent, float *interpolation, glm::vec3 *velocity, std::vector<glm::vec3> *normals)\n{\n\tbool intersection = false;\n\tfor(const StaticMeshCell &cell : parent.children)\n\t{\n\t\tif(cell.leaf)\n\t\t{\n\t\t\tthis->collisionResponseCellTriangles(sphere, cell, interpolation, velocity, normals);\n\t\t}\n\t\telse\n\t\t{\n\t\t\tif(cell.bounds->checkIntersect(sphere) && this->collisionResponseCellChildren(sphere, cell, interpolation, velocity, normals))\n\t\t\t{\n\t\t\t\t//if(velocity==0)\n\t\t\t\t//\treturn true;//Don't need to bother to check everything\n\t\t\t\tintersection = true;\n\t\t\t}\n\t\t}\n\t}\n\treturn intersection;\n}\n//v -= s; \n#define PROCESS_VELOCITY(s,v,n,d) \\\n\t\t\t\ts = v*d; \\\n\t\t\t\tv -= s; \\\n\t\t\t\tv -= (n*glm::dot(v, n)); \\\n\t\t\t\ts += v;\nbool StaticMesh::collisionResponseCellTriangles(const Sphere &sphere, const StaticMeshCell &cell, float *interpolation, glm::vec3 *velocity, std::vector<glm::vec3> *normals)\n{\n\tfor(const StaticMeshTriangle &triangle : cell.triangles)\n\t{\n\t\t// Get triangle\n\t\tglm::vec3 &V0 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+0]];\n\t\tglm::vec3 &V1 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+1]];\n\t\tglm::vec3 &V2 = *(glm::vec3 *)&triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+2]];\n\t\t// Get triangle edges + normal\n\t\tglm::vec3 u = V1-V0;\n\t\tglm::vec3 v = V2-V0;\n\t\tglm::vec3 normal = glm::normalize(glm::cross(u,v));\n\n\t\tfloat f = glm::dot(normal, sphere.center-V0);\n\t\tif(f>0)\n\t\t\tnormal = -normal;\n\t\tif(glm::dot(normal, this->getTriangleNormal(triangle))>COLLISION_RESPONSE_EPSILON)\n\t\t\tcontinue;\n\t\tglm::vec3 P0 = sphere.center+normal*sphere.radius;\n\t\t// Determine where the ray is hitting the plane\n\t\tfloat rI_d = glm::dot(normal, *velocity);//Normalized velocity length along the normal\n\t\tif(rI_d<1.0E-8)//Ray is parallel\n\t\t\tcontinue;\n\n\t\t//// Check if the closest part of the sphere will hit the surface\n\t\tfloat r = glm::dot(normal, V0-P0)/rI_d;\n\t\tbool hit = false;\n\t\tif(r>=-1.0f-COLLISION_RESPONSE_EPSILON && r<=1.0f+COLLISION_RESPONSE_EPSILON)//Triangle is behind minimum and hitting this tick\n\t\t{\n\t\t\t// Is the projected point on the plane within the triangle?\n\t\t\tglm::vec3 w = (P0+*velocity*(r))-V0;\n\t\t\tfloat dotWU = glm::dot(w,u);\n\t\t\tfloat dotWV = glm::dot(w,v);\n\t\t\tfloat dotUV = glm::dot(u,v);\n\t\t\tfloat dotVV = glm::dot(v,v);\n\t\t\tfloat dotUU = glm::dot(u,u);\n\t\t\tfloat _I_d = dotUV*dotUV-dotUU*dotVV;\n\t\t\tfloat s = (dotUV*dotWV-dotVV*dotWU)/_I_d;\n\t\t\tfloat t = 
(dotUV*dotWU-dotUU*dotWV)/_I_d;\n\t\t\tif(s>=0 && t>=0 && s+t<=1)//If within triangle\n\t\t\t{\n\t\t\t\tif(r<*interpolation)\n\t\t\t\t{\n\t\t\t\t\t*interpolation = r;\n\t\t\t\t}\n\t\t\t\t//if(std::fabs(r-*interpolation)<COLLISION_RESPONSE_EPSILON)\n\t\t\t\tnormals->push_back(normal);\n\t\t\t\thit = true;\n\t\t\t}\n\t\t}\n\t\tif(!hit)\n\t\t{\n\t\t\tif(this->collisionResponseCellTriangleEdge(sphere, interpolation, velocity, normals, V0, V1)) continue;\n\t\t\tif(this->collisionResponseCellTriangleEdge(sphere, interpolation, velocity, normals, V0, V2)) continue;\n\t\t\tif(this->collisionResponseCellTriangleEdge(sphere, interpolation, velocity, normals, V1, V2)) continue;\n\t\t}\n\t}\n\treturn false;\n}\nbool StaticMesh::collisionResponseCellTriangleEdge(const Sphere &sphere, float *interpolation, glm::vec3 *velocity, std::vector<glm::vec3> *normals, const glm::vec3 &V0, const glm::vec3 &V1)\n{\n\tglm::vec3 AB = V1-V0;//d\n\tglm::vec3 AO = sphere.center-V0;//P\n\tglm::vec3 AOxAB = glm::cross(AO, AB);//pxd\n\tglm::vec3 VxAB = glm::cross(*velocity, AB);//vxd\n\tfloat r2 = sphere.radius*sphere.radius;\n\tfloat d2 = glm::dot(AB, AB);\n\tfloat v2 = glm::dot(VxAB, VxAB);\n\tfloat p2 = glm::dot(AOxAB, AOxAB);\n\tfloat pv = glm::dot(AOxAB, VxAB);\n\tfloat a = v2;\n\tfloat b = pv*2.0f;\n\tfloat c = p2-(r2*d2);\n\n\tfloat enter,exit;\n\n\tif(a<1.0E-8f)\n\t{\n\t\tif(c>0.0f)\n\t\t{\n\t\t\treturn false;\n\t\t}\n\t\tenter = exit = 0.0f;\n\t}\n\telse if(!getQuadraticRoots(a,b,c, enter, exit))\n\t{\n\t\treturn false;\n\t}\n\tif(enter<0.f)\n\t{\n\t\treturn false;\n\t}\n\tglm::vec3 newSphereCenter = sphere.center+*velocity*enter;\n\tfloat f = glm::dot(newSphereCenter-V0,AB)/glm::dot(AB,AB);\n\tif(f>=0.f-COLLISION_RESPONSE_EPSILON && f<=1.f+COLLISION_RESPONSE_EPSILON)\n\t{\n\t\tglm::vec3 edgeHitPoint = V0+AB*f;\n\t\tglm::vec3 normal = glm::normalize(newSphereCenter-edgeHitPoint);\n\t\tif(enter<*interpolation)\n\t\t{\n\t\t\t*interpolation = enter;\n\t\t}\n\t\t//if(std::fabs(enter-*interpolation)<COLLISION_RESPONSE_EPSILON)\n\t\tnormals->push_back(normal);\n\t\treturn true;\n\t}\n\telse if(f<0)\n\t{\n\t\tif(!this->collisionResponseCellTriangleVertex(sphere, *velocity, V0, enter, exit))\n\t\t\treturn false;\n\t\tif(enter<0.f)\n\t\t\tenter = 0.f;\n\t\tglm::vec3 normal = glm::normalize((sphere.center+*velocity*enter)-V0);\n\t\tif(enter<=*interpolation)\n\t\t{\n\t\t\t*interpolation = enter;\n\t\t}\n\t\t//if(std::fabs(enter-*interpolation)<COLLISION_RESPONSE_EPSILON)\n\t\tnormals->push_back(normal);\n\t\treturn true;\n\t}\n\treturn false;\n}\nbool StaticMesh::collisionResponseCellTriangleVertex(const Sphere &sphere, const glm::vec3 &P1, const glm::vec3 &V, float &enter, float &exit)\n{\n\tglm::vec3 p = sphere.center-V;\n\tfloat r2 = sphere.radius*sphere.radius;\n\tfloat v2 = glm::dot(P1, P1);\n\tfloat p2 = glm::dot(p,p);\n\tfloat pv = glm::dot(p,P1);\n\t\n\tfloat a = v2;\n\tfloat b = pv*2.0f;\n\tfloat c = p2-r2;\n\t\n\tif(a<1.0E-8f)\n\t{\n\t\tif(c>0.0f)\n\t\t\treturn false;\n\t\tenter = 0.0f;\n\t\texit = 0.0f;\n\t\treturn true;\n\t}\n\treturn getQuadraticRoots(a,b,c, enter, exit);\n}\n" }, { "alpha_fraction": 0.7333333492279053, "alphanum_fraction": 0.7355555295944214, "avg_line_length": 19.454545974731445, "blob_id": "347568185e048eb180637663669d108657d8eeb9", "content_id": "f44b9f7ca9493a98e240d288429028cf9bbb279f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 450, "license_type": "no_license", "max_line_length": 82, "num_lines": 22, "path": 
"/src/cpp/gui/elements/Button.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_ELEMENTS_BUTTON_HPP_\n#define FYP_GUI_ELEMENTS_BUTTON_HPP_\n\nnamespace render {\n\tclass Font;\n}\n\n#include \"gui/Element.hpp\"\n#include <string>\n\nclass Button : public Element {\n\tprivate:\n\t\tglm::vec4 backgroundColor,selectedBackgroundColor;\n\t\tstd::string text;\n\tpublic:\n\t\tButton(std::string text);\n\t\t~Button();\n\t\tvirtual void render(util::DeltaTime &deltaTime, render::RenderManager &manager);\n\t\tstatic render::Font *buttonFont;\n};\n\n#endif\n" }, { "alpha_fraction": 0.682692289352417, "alphanum_fraction": 0.7163461446762085, "avg_line_length": 22.11111068725586, "blob_id": "1777dae8ad9db6fa294b52e6ad80fa9a00159262", "content_id": "e8d1fc85bfe008502586b7ad1108207d4e0856cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 416, "license_type": "no_license", "max_line_length": 84, "num_lines": 18, "path": "/src/cpp/render/shaders/code/billboard_vertex.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//vertex\n\nimport in vec3 vertex_position;\n\nimport uniform vec3 billboard_center;\nimport uniform vec2 screen_scaler;\n\nimport uniform mat4 matrix_V;\nimport uniform mat4 matrix_P;\n\nout vec2 UV;\n\nvoid main() {\n\tgl_Position = matrix_P * matrix_V * vec4(billboard_center, 1.f);\n\tgl_Position /= gl_Position.w;\n\tgl_Position.xy += (vertex_position.xy*vec2(2.f, 2.f)-vec2(1.f, 1.f))*screen_scaler;\n\tUV = vertex_position.xy;\n}\n" }, { "alpha_fraction": 0.739949107170105, "alphanum_fraction": 0.747073769569397, "avg_line_length": 27.897058486938477, "blob_id": "d0b90f2cb0e275a94eb5bcde6ea4a629cf477cf3", "content_id": "1afb0e341143a0f37c6b8de83ba91e6f62781a70", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1965, "license_type": "no_license", "max_line_length": 123, "num_lines": 68, "path": "/src/cpp/world/entities/Enemy.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <glm/gtc/matrix_transform.hpp>\n#include <iostream>\n#include \"util/gl.h\"\n#include \"ai/ObjectiveManager.hpp\"\n\n#include \"render/RenderManager.hpp\"\n#include \"render/SkeletalModel.hpp\"\n#include \"render/SkeletalAnimation.hpp\"\n#include \"world/World.hpp\"\n\n#include \"Enemy.hpp\"\n\nusing namespace render;\nusing namespace world;\nusing namespace world::entities;\nusing namespace ai;\nusing namespace ai::path;\n\nEnemy::Enemy(World *world) : super()\n{\n\tthis->animTime = 0;\n\tthis->objectiveManager = new ObjectiveManager(world, this, world->world_navigation_graph);\n\tthis->setBulletHealth(1.f, 10);\n}\nEnemy::~Enemy()\n{\n\tdelete this->objectiveManager;\n}\nvoid Enemy::tick(util::DeltaTime &deltaTime)\n{\n\tthis->animTime = deltaTime.getTime();\n\tthis->objectiveManager->tick(deltaTime);\n}\nvoid Enemy::render(render::RenderManager &rManager)\n{\n\t// Render cube\n\t/*\n\tshaders::ShaderProgram *shader = rManager.useShader(SHADER_solidColor);\n\tGLint vploc = shader->getShaderLocation(false, SHADERVAR_vertex_position);\n\tBasicShapes::renderUnitCube(vploc);\n\t*/\n\trManager.pushMatrixM();\n\n\trManager.M = glm::scale(glm::translate(glm::mat4(1.0f), this->getPosition()), glm::vec3(1,1,1));\n\trManager.markMDirty();\n\n\trManager.useShader(SHADER_fuzzyModel);\n\n\tSkeletalModel *drone = (SkeletalModel *)util::AssetManager::getAssetManager()->getAsset(ASSET_ENEMY_MD5MESH);\n\tSkeletalAnimation 
*drone_anim = (SkeletalAnimation *)util::AssetManager::getAssetManager()->getAsset(ASSET_ENEMY_MD5ANIM);\n\n\tint frame = drone_anim->getFrame(animTime);\n\tdrone->render(rManager, drone_anim->getFrameSkeleton(frame));\n\tthis->bounds = &drone_anim->getFrameBounds(frame);\n\t//drone_anim->render(rManager, *drone, animTime);\n\n\trManager.popMatrixM();\n\t//drone_anim->renderBounds(rManager, animTime);\n\t//\n\tthis->objectiveManager->render(rManager);\n}\nvoid Enemy::setPath(std::vector<int> path)\n{\n}\nvoid Enemy::die(double time, glm::vec3 direction, int type)\n{\n\tthis->getWorld().gameOver(GAME_OVER_ENEMY_DESTROYED);\n}\n" }, { "alpha_fraction": 0.6767676472663879, "alphanum_fraction": 0.6818181872367859, "avg_line_length": 13.142857551574707, "blob_id": "220489f0d14a7a38a4fb1a230f0ca2af91d51e15", "content_id": "9b81da794338d7f5e9a27c168c6ebc22e7d64a2f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 198, "license_type": "no_license", "max_line_length": 28, "num_lines": 14, "path": "/src/cpp/util/gl.h", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_UTIL_GL_H_\n#define FYP_UTIL_GL_H_\n\n#ifdef YCM\n\t#define GL_GLEXT_PROTOTYPES\n\t#include <GL/gl.h>\n\t#include <GL/glext.h>\n#else\n\t#include <GL/glew.h>\n#endif\n\n#include <GLFW/glfw3.h>\n\n#endif\n" }, { "alpha_fraction": 0.7414634227752686, "alphanum_fraction": 0.7414634227752686, "avg_line_length": 13.642857551574707, "blob_id": "3373bbca2a22323b29cd2459696ea03936550072", "content_id": "b222f6a3456c4051cc5185aefb072666fb46b540", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 205, "license_type": "no_license", "max_line_length": 41, "num_lines": 14, "path": "/src/cpp/gui/elements/ProgressBar.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_ELEMENTS_PROGRESSBAR_HPP_\n#define FYP_GUI_ELEMENTS_PROGRESSBAR_HPP_\n\nclass ProgressBar;\n\n// Include dependencies\n\nclass ProgressBar {\n\tpublic:\n\t\tProgressBar();\n\t\t~ProgressBar();\n};\n\n#endif\n" }, { "alpha_fraction": 0.7692307829856873, "alphanum_fraction": 0.7692307829856873, "avg_line_length": 25, "blob_id": "16d5fb4d21c03b78a4a10655da64ce287bc9110d", "content_id": "d272f76933a1539363f62e9ad9249651854d5982", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 26, "license_type": "no_license", "max_line_length": 25, "num_lines": 1, "path": "/src/cpp/util/DebugFlags.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"DebugFlags.hpp\"\n" }, { "alpha_fraction": 0.6754221320152283, "alphanum_fraction": 0.6754221320152283, "avg_line_length": 19.5, "blob_id": "dcff6845336af40515f9361c80ac15b2478a2eb9", "content_id": "fb028b80befebe44cf21c96484e3d6c1d0981c3d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1599, "license_type": "no_license", "max_line_length": 85, "num_lines": 78, "path": "/src/cpp/gui/ScreenManager.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_SCREENMANAGER_HPP_\n#define FYP_GUI_SCREENMANAGER_HPP_\n\nclass Screen;\nnamespace util {\nclass DeltaTime;\n}\nnamespace render {\nclass RenderManager;\n}\n\n#include <glm/glm.hpp>\n#include <list>\n\nclass ScreenManager {\n\tprivate:\n\t\tstd::list<Screen *> screens;\n\t\tdouble lastCursorX,lastCursorY;\n\tprotected:\n\t\tdouble width;\n\t\tdouble 
height;\n\tpublic:\n\t\tScreenManager();\n\t\tvirtual ~ScreenManager();\n\n\t\t// GUI Controls\n\t\t/**\n\t\t * Opens a screen at the base of the stack\n\t\t */\n\t\tvoid openRootScreen(Screen *screen);\n\t\t/**\n\t\t * Opens a screen over the existing screens\n\t\t */\n\t\tvoid openScreen(Screen *screen);\n\t\t/**\n\t\t * Exits the screen manager\n\t\t */\n\t\tvirtual void close();\n\t\t/**\n\t\t * Returns true if the screen is a surface screen\n\t\t */\n\t\tbool isScreenSurface(Screen *screen);\n\t\t/**\n\t\t * Returns true if the screen is at the base\n\t\t */\n\t\tbool isScreenBase(Screen *screen);\n\t\t/**\n\t\t * Closes the selected screen\n\t\t */\n\t\tScreen *closeScreen(Screen *screen);\n\n\t\t// Events\n\t\t/**\n\t\t * On control boolean update\n\t\t */\n\t\tvirtual bool onControlEvent(int control, int action);\n\t\t/**\n\t\t * On control delta update\n\t\t */\n\t\tvirtual bool onControlEvent(int control, double x, double y, double dx, double dy);\n\t\t/**\n\t\t * Called by the screen manager whenever the screen resizes\n\t\t */\n\t\tvirtual void onScreenResize();\n\t\t/**\n\t\t * Called by ScreenManager when the surface screen changes\n\t\t */\n\t\tvirtual void onSurfaceScreenChanged(Screen *screen);\n\n\t\t/**\n\t\t * Render the screens\n\t\t */\n\t\tvoid render(util::DeltaTime &deltaTime, render::RenderManager &manager);\n\t\tdouble getWidth();\n\t\tdouble getHeight();\n};\n\n#endif\n" }, { "alpha_fraction": 0.6842105388641357, "alphanum_fraction": 0.6842105388641357, "avg_line_length": 8.5, "blob_id": "d5c7376fd7ccd7c028d7514eb7639589adb6975a", "content_id": "4117e2f26e261575760e2af70db23bcbf4eadc46", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 95, "license_type": "no_license", "max_line_length": 27, "num_lines": 10, "path": "/src/cpp/gui/elements/ProgressBar.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"ProgressBar.hpp\"\n\nProgressBar::ProgressBar()\n{\n\t\n}\nProgressBar::~ProgressBar()\n{\n\t\n}\n" }, { "alpha_fraction": 0.7272727489471436, "alphanum_fraction": 0.7299205660820007, "avg_line_length": 25.9761905670166, "blob_id": "6cb1b6639c0074c170e1a3024ba673ebe3b7f927", "content_id": "723e844056a2ae334adf417ae2f9553f711c7c73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1133, "license_type": "no_license", "max_line_length": 101, "num_lines": 42, "path": "/src/cpp/ai/path/NavigationGraph.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_PATH_NAVIGATIONGRAPH_HPP_\n#define FYP_AI_PATH_NAVIGATIONGRAPH_HPP_\n\n#include \"PathCommon.h\"\n\n#include <vector>\n#include <set>\n\n#include \"util/AssetManager.hpp\"\n\n#include \"render/RenderManager.hpp\"\n\n\nnamespace ai {\n\tnamespace path {\n\t\tclass NavigationGraph : public util::Asset {\n\t\t\tpublic:\n\t\t\t\tunsigned int numGroups;\n\t\t\t\tunsigned int numNodes;\n\t\t\t\tunsigned int numNodeLinks;\n\t\t\t\tint *groupCounts;\n\t\t\t\tint *groupMasks;\n\t\t\t\tPathNode *nodes;\n\t\t\t\tNavigationGraph(int assetId, std::istream &fp);\n\t\t\t\tvirtual ~NavigationGraph();\n\t\t\t\tvirtual void write(std::ostream &ost) const;\n\t\t\t\tvirtual void postload();\n\t\t\t\tvoid render(render::RenderManager &manager);\n\t\t\t\tPathNode *getRandomNode(int mask);\n\t\t\t\tPathNode *getNearestPathNode(glm::vec3 position, int mask);\n\t\t\t\tPathNode *getNearestPathNode(glm::vec3 position);\n\t\t\t\tPathNode 
*getNearestPathNodeFromPool(glm::vec3 position, std::set<int> &pool);\n\t\t\t\tint getPathNodeGroupMask(int id);\n\t\t\tprivate:\n\t\t\t\t// Debug\n\t\t\t\tGLuint vertexArrayId,vertexPositionBufferId,vertexColorBufferID,indexBufferID,indexColorBufferID;\n\t\t\t\tGLfloat *vertexColorData,*linkColorData;\n\t\t};\n}\n}\n\n#endif\n" }, { "alpha_fraction": 0.7057220935821533, "alphanum_fraction": 0.7084468603134155, "avg_line_length": 28.360000610351562, "blob_id": "a8449fa32bb6da9e519f4c83678a3336f6c75afb", "content_id": "bd1b07720f0ccbbb520d4bf7170e8c8a665e15b5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1468, "license_type": "no_license", "max_line_length": 136, "num_lines": 50, "path": "/src/cpp/render/DDSImage.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/Globals.hpp\"\n#include <istream>\n#include <cstring>\n#include \"util/gl.h\"\n#include \"util/StreamUtils.hpp\"\n#include <SOIL/SOIL.h>\n\n#include \"DDSImage.hpp\"\n\nusing namespace render;\nusing namespace util::StreamUtils;\n\nDDSImage::DDSImage(int assetId, std::istream &fp) : util::Asset(assetId)\n{\n\tthis->setName(readString(fp));\n\t\n\t// Read DDS from asset stream\n\tthis->imageDataSize = readInt(fp);\n\tthis->imageData = new unsigned char[this->imageDataSize];\n\tfp.read((char *)this->imageData, this->imageDataSize*sizeof(char));\n}\nDDSImage::~DDSImage()\n{\n\t\n}\nvoid DDSImage::write(std::ostream &ost) const\n{\n\tost << \"[\" << this->getAssetID() << \":\" << this->getName() << \".dds] \" << (this->imagePushed ? \"pushed to GPU\" : \"loaded and waiting\");\n}\nvoid DDSImage::postload()\n{\n\tthis->textureID = SOIL_load_OGL_texture_from_memory\n\t\t(\n\t\t\tthis->imageData,\n\t\t\tthis->imageDataSize,\n\t\t\tSOIL_LOAD_AUTO,\n\t\t\tSOIL_CREATE_NEW_ID,\n\t\t\tSOIL_FLAG_MIPMAPS | SOIL_FLAG_INVERT_Y | SOIL_FLAG_COMPRESS_TO_DXT\n\t\t);\n\tglTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);\n\tglTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);\n\tGLfloat largest_supported_anisotropy;\n\tglGetFloatv(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, &largest_supported_anisotropy);\n\tglTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, largest_supported_anisotropy);\n}\nvoid DDSImage::bindTexture()\n{\n\t//std::cout << getName() << \" bind\" << std::endl;\n\tglBindTexture(GL_TEXTURE_2D, this->textureID);\n}\n" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.5714285969734192, "avg_line_length": 6, "blob_id": "b216e20f2339b9abf6e982ecde5e3317c530ec1e", "content_id": "ea74342c6d6cc502b5ee1a4a49df7eb60f4a19bc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 42, "license_type": "no_license", "max_line_length": 22, "num_lines": 6, "path": "/src/cpp/render/shaders/code/fontrender.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//vertex --version 150\n\nvoid main()\n{\n\t\n}\n" }, { "alpha_fraction": 0.650231122970581, "alphanum_fraction": 0.6571648716926575, "avg_line_length": 22.889570236206055, "blob_id": "eddb13e6b7cdefea5f0df28214a0417727567df1", "content_id": "8ece3b6c9eceff4e8bebdc79772e319ab14cbf67", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3894, "license_type": "no_license", "max_line_length": 97, "num_lines": 163, "path": "/src/cpp/gui/Screen.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <glm/glm.hpp>\n#include 
\"Element.hpp\"\n#include \"input/Controls.hpp\"\n#include <iostream>\n\n#include \"Screen.hpp\"\n\nScreen::Screen()\n{\n\tthis->selectedElement = 0;\n\tthis->elementSelectedWithMouse = false;\n\tthis->firstNext = 0;\n\tthis->firstPrev = 0;\n\tthis->firstLeft = 0;\n\tthis->firstRight = 0;\n\tthis->firstUp = 0;\n\tthis->firstDown = 0;\n\tthis->manager = 0;\n}\nScreen::~Screen()\n{\n\t\n}\nvoid Screen::addElement(Element *e)\n{\n\tthis->elements.push_back(e);\n\te->screen = this;\n}\nvoid Screen::render(util::DeltaTime &deltaTime, render::RenderManager &manager)\n{\n\tfor(std::list<Element *>::iterator it = this->elements.begin(); it!=this->elements.end(); ++it)\n\t{\n\t\tElement *e = *it;\n\t\te->render(deltaTime, manager);\n\t}\n}\nvoid Screen::selectElement(Element *element, bool mouseSelection)\n{\n\tif(this->selectedElement!=0)\n\t{\n\t\tthis->selectedElement->selected = false;\n\t}\n\tthis->selectedElement = element;\n\tif(this->selectedElement!=0)\n\t{\n\t\tthis->selectedElement->selected = true;\n\t}\n\tthis->elementSelectedWithMouse = mouseSelection;\n}\nbool Screen::onControlEvent(int control, int action)\n{\n\tif(action&CONTROL_ACTION_PRESS)\n\t{\n\t\tif(this->selectedElement!=0)\n\t\t{\n\t\t\tif((control&CONTROL_GUI_NEXT) && this->selectedElement->next!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->selectedElement->next, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_PREV) && this->selectedElement->prev!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->selectedElement->prev, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_LEFT) && this->selectedElement->left!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->selectedElement->left, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_RIGHT) && this->selectedElement->right!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->selectedElement->right, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_UP) && this->selectedElement->up!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->selectedElement->up, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_DOWN) && this->selectedElement->down!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->selectedElement->down, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t\telse\n\t\t{\n\t\t\tif((control&CONTROL_GUI_NEXT) && this->firstNext!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->firstNext, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_PREV) && this->firstPrev!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->firstPrev, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_LEFT) && this->firstLeft!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->firstLeft, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_RIGHT) && this->firstRight!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->firstRight, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_UP) && this->firstUp!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->firstUp, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif((control&CONTROL_GUI_DOWN) && this->firstDown!=0)\n\t\t\t{\n\t\t\t\tthis->selectElement(this->firstDown, false);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\tif(this->selectedElement!=0)\n\t{\n\t\treturn this->selectedElement->onControlEvent(control, action);\n\t}\n\treturn false;\n}\nbool Screen::onControlEvent(int control, double x, double y, double dx, double dy)\n{\n\tif(control==CONTROL_ACTION_MOUSE)\n\t{\n\t\tif(this->selectedElement!=0 && 
this->elementSelectedWithMouse)\n\t\t{\n\t\t\t// Mouse de-selection only if selected with mouse\n\t\t\tif(!this->selectedElement->isInside(x, y))\n\t\t\t\tthis->selectElement(0, true);\n\t\t}\n\t\tfor(std::list<Element *>::iterator it = this->elements.begin(); it!=this->elements.end(); ++it)\n\t\t{\n\t\t\tElement *e = *it;\n\t\t\tif(e->isInside(x, y))\n\t\t\t{\n\t\t\t\tthis->selectElement(e, true);\n\t\t\t}\n\t\t}\n\t}\n\tif(this->selectedElement!=0)\n\t{\n\t\treturn this->selectedElement->onControlEvent(control, x, y, dx, dy);\n\t}\n\treturn false;\n}\nvoid Screen::onScreenResize()\n{\n\tfor(std::list<Element *>::iterator it = this->elements.begin(); it!=this->elements.end(); ++it)\n\t{\n\t\tElement *e = *it;\n\t\te->onScreenResize();\n\t}\n}\n\nbool Screen::supportsCursor() {\n\treturn true;\n}\n" }, { "alpha_fraction": 0.5420666337013245, "alphanum_fraction": 0.5573122501373291, "avg_line_length": 34.41999816894531, "blob_id": "28a503089757dcd5f326b5a0b1907b361d0c819a", "content_id": "ff60ff0c03891dafaeb02850768cd7a85aa62c30", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1771, "license_type": "no_license", "max_line_length": 264, "num_lines": 50, "path": "/hooks/asset_makemeta.py", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport sys,re\n\nfrom asset_common import *\n\ndef printMaterials(source_path, fp, assetId, assetPath):\n path = getFilePath(source_path)\n with open(source_path, \"r\") as f:\n material_count = -1\n for line in f:\n splits = line[:-1].split(\" \")\n if splits[0]==\"newmtl\":\n material_count+=1\n print(\"// addMTLMaterial(\"+str(assetId)+\",\"+str(material_count)+\",\"+assetPath+\",\"+splits[1]+\")\", file=fp)\n\nif __name__==\"__main__\":\n sources = sys.argv[2:-2]\n metafile = sys.argv[-1]\n\n #print(\"Sources:\", sources)\n #print(\"Objects:\", objects)\n #print(\"Meta:\", metafile)\n\n meta_fp = open(metafile, \"w\")\n\n mtl_count = 0\n obj_count = 0\n md5mesh_count = 0\n md5anim_count = 0\n\n for i in range(len(sources)):\n fn = getFileName(sources[i])\n ext = getFileExtension(sources[i])\n print(\"\\n// addAsset(\"+str(i)+\",\"+sources[i]+\")\", file=meta_fp)\n print(\"#define ASSET_\"+fn.upper()+\"_\"+ext.upper().replace(\".\", \"_\")+\" \"+str(i), file=meta_fp)\n if ext==\"mtl\":\n mtl_count += 1\n printMaterials(sources[i], meta_fp, str(i), sources[i])\n elif ext==\"obj\":\n obj_count += 1\n elif ext==\"md5mesh\":\n md5mesh_count += 1\n elif ext==\"md5anim\":\n md5anim_count += 1\n elif ext==\"tga\" or ext==\"png\" or ext==\"jpg\":\n print(\"// addTexture(\"+str(i)+\",\"+sources[i]+\")\", file=meta_fp)\n\n print(\"\\n#define ASSETS_COUNT \"+str(len(sources))+\"\\n#define ASSETS_MTL_COUNT \"+str(mtl_count)+\"\\n#define ASSETS_OBJ_COUNT \"+str(obj_count)+\"\\n#define ASSETS_MD5MESH_COUNT \"+str(md5mesh_count)+\"\\n#define ASSETS_MD5ANIM_COUNT \"+str(md5anim_count), file=meta_fp)\n meta_fp.close()\n" }, { "alpha_fraction": 0.7121548652648926, "alphanum_fraction": 0.7223103642463684, "avg_line_length": 30.828283309936523, "blob_id": "69b83d40f5a6671b1b60c9ff1a5597ce67162335", "content_id": "545e0dc93c6e2f981c2aa8c166e72e8ecb915195", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3151, "license_type": "no_license", "max_line_length": 222, "num_lines": 99, "path": "/src/cpp/world/entities/Turret.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include 
<cmath>\n#include <iostream>\n#include <glm/gtx/quaternion.hpp>\n#include <glm/gtc/matrix_transform.hpp>\n#include <glm/gtx/vector_angle.hpp>\n\n#include \"res/md5/turret.md5mesh.h\"\n#include \"util/gl.h\"\n\n#include \"render/SkeletalAnimation.hpp\"\n\n#include \"world/entities/Enemy.hpp\"\n\n#include \"Turret.hpp\"\n\nusing namespace ai;\nusing namespace world;\nusing namespace entities;\nusing namespace render;\n\nTurret::Turret() : super()\n{\n\tthis->model = (SkeletalModel *)util::AssetManager::getAssetManager()->getAsset(ASSET_TURRET_MD5MESH);\n\tthis->initAnimation = (SkeletalAnimation *)util::AssetManager::getAssetManager()->getAsset(ASSET_TURRET_MD5ANIM);\n\tthis->animationDuration = this->initAnimation->getAnimationDuration();\n\tthis->animationCurrent = 0;\n\tthis->initAnimating = true;\n\tthis->selector = false;\n\tthis->bounds = &this->initAnimation->getFrameBounds(-1);\n\tthis->perception = new PerceptionManager((Entity *)this, {&typeid(Enemy)}, this->model->bindPoseSkeleton[ASSET_TURRET_MD5MESH_JOINT_CAMERASTALK].pos);\n\tthis->aimBot = new AimBot(this->perception);\n}\nTurret::~Turret()\n{\n\t\n}\n#define TRACKING_SPEED 10\n\nvoid Turret::tick(util::DeltaTime &deltaTime)\n{\n\tif(this->selector)\n\t\treturn;\n\tif(this->initAnimating)\n\t{\n\t\tif(deltaTime.getTime()-this->getSpawnTime()<=this->animationDuration)\n\t\t{\n\t\t\tthis->animationCurrent = deltaTime.getTime()-this->getSpawnTime();\n\t\t\tthis->bounds = &this->initAnimation->getFrameBounds(this->initAnimation->getFrame(this->animationCurrent));\n\t\t}\n\t\telse\n\t\t{\n\t\t\tthis->initAnimating = false;\n\t\t\tthis->bounds = &this->initAnimation->getFrameBounds(-1);\n\t\t}\n\t}\n\telse\n\t{\n\t\tthis->perception->tick(deltaTime);\n\t\tthis->aimBot->tick(deltaTime);\n\t}\n}\nvoid Turret::render(RenderManager &rManager)\n{\n\t//this->setOrientation(this->getOrientation()*glm::quat(glm::vec3(0, 0, glm::radians((float)(std::rand()%100)/10))));\n\t\n\trManager.pushMatrixM();\n\trManager.M = glm::translate(rManager.M, this->getPosition())*glm::toMat4(this->getOrientation());\n\n\trManager.markMDirty();\n\trManager.useShader(SHADER_fuzzyModel);\n\n\t/*Skeleton skel = this->model->bindPoseSkeleton;\n\tthis->model->render(rManager, skel);*/\n\t//std::cout << \"R time=\" << this->animTime << std::endl;\n\t//this->initAnimation->render(rManager, *this->model, this->animTime);\n\t//drone->renderSkeleton(rManager, skel);\n\t//drone->renderWeights(rManager, skel);\n\tif(this->selector)\n\t{\n\t\tthis->model->render(rManager, this->initAnimation->getSkeleton(0));\n\t}\n\telse if(this->initAnimating)\n\t{\n\t\tthis->model->render(rManager, this->initAnimation->getSkeleton(this->animationCurrent));\n\t\t//this->initAnimation->renderBounds(rManager, this->animationCurrent);\n\t}\n\telse\n\t{\n\t\tSkeleton skel = this->model->bindPoseSkeleton;\n\t\tskel[ASSET_TURRET_MD5MESH_JOINT_CAMERASTALK].ori = this->perception->getOrientation();//glm::quat(glm::vec3(this->currentPitch, 0, this->currentYaw));//, glm::vec3(0, 0, 1))*glm::angleAxis(this->pitch, glm::vec3(1,0,0));\n\t\tthis->model->render(rManager, skel);\n\t\tthis->aimBot->render(rManager);\n\t\t//this->initAnimation->renderBounds(rManager, this->animationCurrent);\n\t}\n\trManager.popMatrixM();\n}\nvoid Turret::die(double time, glm::vec3 direction, int type)\n{\n}\n" }, { "alpha_fraction": 0.6347565650939941, "alphanum_fraction": 0.6543663144111633, "avg_line_length": 30.95061683654785, "blob_id": "47d64a89c69c32e8dd0e983f59d95efee2654972", "content_id": 
"5741272e3131221a03c67aa829b129794057660b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 10352, "license_type": "no_license", "max_line_length": 152, "num_lines": 324, "path": "/src/cpp/ai/path/NavigationGraph.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/gl.h\"\n#include \"render/shaders/ShaderUtils.hpp\"\n#include \"render/BasicShapes.hpp\"\n#include <glm/gtc/matrix_transform.hpp>\n#include <glm/gtx/norm.hpp>\n#include \"render/RenderManager.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include \"util/StreamUtils.hpp\"\n\n#include \"util/Globals.hpp\"\n\n#include \"NavigationGraph.hpp\"\n\nusing namespace ai::path;\nusing namespace util::StreamUtils;\nusing namespace render;\n\nNavigationGraph::NavigationGraph(int assetId, std::istream &fp) : Asset(assetId)\n{\n\tsetName(readString(fp));\n\tthis->numGroups = static_cast<unsigned int>(readInt(fp));\n\tthis->groupCounts = new int[this->numGroups];\n\tstd::fill(this->groupCounts, this->groupCounts+this->numGroups, 0);\n\tthis->numNodes = static_cast<unsigned int>(readInt(fp));\n\tthis->groupMasks = new int[this->numNodes];\n\tthis->nodes = new PathNode[this->numNodes];\n\tfor(unsigned int i=0;i<this->numNodes;i++)\n\t{\n\t\tPathNode &n = this->nodes[i];\n\t\tn.id = static_cast<int>(i);\n\t\tthis->groupMasks[i] = readInt(fp);\n\t\tfor(unsigned int j=0;j<this->numGroups;j++)\n\t\t{\n\t\t\tif((1<<j)&this->groupMasks[i])\n\t\t\t\tthis->groupCounts[j]++;\n\t\t}\n\t\tn.position.x = readFloat(fp);\n\t\tn.position.y = readFloat(fp);\n\t\tn.position.z = readFloat(fp);\n\t}\n\t// Create node links\n\tnumNodeLinks = static_cast<unsigned int>(readInt(fp));\n\tunsigned int tmp;\n\tfor(unsigned int i=0;i<numNodeLinks;i++)\n\t{\n\t\tPathNodeLink *pnLink = new PathNodeLink;\n\t\ttmp = static_cast<unsigned int>(readInt(fp));\n\t\tif(tmp>=this->numNodes)\n\t\t\tutil::Globals::fatalError(\"Node link a outside range \"+std::to_string(tmp)+\" vs \"+std::to_string(this->numNodes));\n\t\tpnLink->a = &this->nodes[tmp];\n\t\ttmp = static_cast<unsigned int>(readInt(fp));\n\t\tif(tmp>=this->numNodes)\n\t\t\tutil::Globals::fatalError(\"Node link b outside range\");\n\t\tpnLink->id = static_cast<int>(i);\n\t\tpnLink->b = &this->nodes[tmp];\n\t\tpnLink->dist = static_cast<double>(glm::distance(pnLink->a->position, pnLink->b->position));\n\t\tpnLink->a->links.push_back(pnLink);\n\t\tpnLink->b->links.push_back(pnLink);\n\t}\n}\nNavigationGraph::~NavigationGraph()\n{\n\tstd::set<struct PathNodeLink *> links;\n\tfor(unsigned int i=0;i<this->numNodes;i++)\n\t{\n\t\tauto *n = &this->nodes[i];\n\t\tfor(auto &nLink : n->links)\n\t\t\tlinks.insert(nLink);\n\t}\n\tfor(auto &n : links)\n\t\tdelete n;\n\tdelete [] this->nodes;\n}\nvoid NavigationGraph::write(std::ostream &ost) const\n{\n}\nvoid NavigationGraph::postload()\n{\n\tglGenVertexArrays(1, &this->vertexArrayId);\n\tglBindVertexArray(this->vertexArrayId);\n\n\n\t// Add vertex positions\n\tGLfloat *vertexPositionData = new GLfloat[this->numNodes*3];\n\tvertexColorData = new GLfloat[this->numNodes*3];\n\tfor(unsigned int i=0;i<this->numNodes;i++)\n\t{\n\t\tPathNode &n = this->nodes[i];\n\t\tvertexPositionData[i*3+0] = n.position.x;\n\t\tvertexPositionData[i*3+1] = n.position.y;\n\t\tvertexPositionData[i*3+2] = n.position.z;\n\t}\n\tglGenBuffers(1, &this->vertexPositionBufferId);\n\tglBindBuffer(GL_ARRAY_BUFFER, this->vertexPositionBufferId);\n\tglBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*this->numNodes*3, vertexPositionData, 
GL_STATIC_DRAW);\n\n\tglGenBuffers(1, &this->vertexColorBufferID);\n\n\tGLuint *vertexIndexBuffer = new GLuint[this->numNodeLinks*2];\n\tint j = 0;\n\tfor(unsigned int i=0;i<this->numNodes;i++)\n\t{\n\t\tfor(auto l : this->nodes[i].links)\n\t\t{\n\t\t\tif(l->a==&this->nodes[i])\n\t\t\t{\n\t\t\t\tvertexIndexBuffer[j*2+0] = l->a->id;\n\t\t\t\tvertexIndexBuffer[j*2+1] = l->b->id;\n\t\t\t\tj++;\n\t\t\t}\n\t\t}\n\t}\n\tglGenBuffers(1, &this->indexBufferID);\n\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, this->indexBufferID);\n\tglBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint)*this->numNodeLinks*2, vertexIndexBuffer, GL_STATIC_DRAW);\n\n\tlinkColorData = new GLfloat[this->numNodeLinks*3];\n\tglGenBuffers(1, &this->indexColorBufferID);\n}\nvoid NavigationGraph::render(render::RenderManager &rManager)\n{\n\trManager.M = glm::mat4(1.0f);\n\trManager.markMDirty();\n\tshaders::ShaderProgram *shader = rManager.useShader(SHADER_fuzzyModel);\n\n\tglBindVertexArray(this->vertexArrayId);\n\t//glEnable(GL_BLEND);\n\t\n\tglBindBuffer(GL_ARRAY_BUFFER, this->vertexPositionBufferId);\n\tshader->setVertexAttributePointer(SHADERVAR_vertex_position, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat)*3, 0);\n\n\tfor(unsigned int i=0;i<this->numNodes;i++)\n\t{\n\t\tPathNode &node = this->nodes[i];\n\t\tglm::vec3 vertexColors;\n\t\tif(node.current)\n\t\t\tvertexColors = {1.0f, 0.f, 0.f};\n\t\telse if(node.closed)\n\t\t\tvertexColors = {1.0f, 1.f, 0.f};\n\t\telse if(node.open)\n\t\t\tvertexColors = {0.0f, 1.f, 0.f};\n\t\telse\n\t\t\tvertexColors = {0.0f, 0.f, 1.f};\n\t\t*(vertexColorData+i*3+0) = vertexColors[0];\n\t\t*(vertexColorData+i*3+1) = vertexColors[1];\n\t\t*(vertexColorData+i*3+2) = vertexColors[2];\n\t}\n\n\tglBindBuffer(GL_ARRAY_BUFFER, this->vertexColorBufferID);\n\tglBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*this->numNodes*3, this->vertexColorData, GL_DYNAMIC_DRAW);\n\tshader->setVertexAttributePointer(SHADERVAR_vertex_color, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat)*3, 0);\n\n\tglPointSize(4.f);\n\tglDrawArrays(GL_POINTS, 0, this->numNodes*3);\n\n\tglBindBuffer(GL_ARRAY_BUFFER, this->vertexPositionBufferId);\n\tshader->setVertexAttributePointer(SHADERVAR_vertex_position, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat)*3, 0);\n\n\t// Draw the links\n\t//int j = 0;\n\t//for(unsigned int i=0;i<this->numNodes;i++)\n\t//{\n\t//\tfor(auto l : this->nodes[i].links)\n\t//\t{\n\t//\t\tif(l->a==&this->nodes[i])\n\t//\t\t{\n\t//\t\t\tglm::vec3 linkColor;// = *(glm::vec3 *)&this->linkColorData[j*3];\n\t//\t\t\tif(l->a->current && l->b->current)\n\t//\t\t\t\tlinkColor = {1.0f, 0.0f, 0.0f};\n\t//\t\t\telse if((l->a->current && l->b->closed) && (l->b->current && l->a->closed))\n\t//\t\t\t\tlinkColor = {1.0f, 1.0f, 0.0f};\n\t//\t\t\telse if((l->a->closed && l->b->open) && (l->b->closed && l->a->open))\n\t//\t\t\t\tlinkColor = {0.0f, 1.0f, 0.0f};\n\t//\t\t\telse\n\t//\t\t\t\tlinkColor = {0.0f, 0.0f, 1.0f};\n\t//\t\t\t*(this->linkColorData+j*3) = linkColor[0];\n\t//\t\t\tj++;\n\t//\t\t}\n\t//\t}\n\t//}\n\t//glBindBuffer(GL_ARRAY_BUFFER, this->indexColorBufferID);\n\t//glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*this->numNodeLinks*3, this->linkColorData, GL_DYNAMIC_DRAW);\n\t//shader->setVertexAttributePointer(SHADERVAR_vertex_color, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat)*3, 0);\n\n\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, this->indexBufferID);\n\tglDrawElements(GL_LINES, this->numNodeLinks*2, GL_UNSIGNED_INT, 0);\n\t//\n\n\t//std::cout << this->vertexArrayId << \":\" << this->vertexPositionBufferId << \", \" << this->vertexColorBufferId << 
std::endl;\n\n//\trManager.M = glm::mat4(1.0f);\n//\trManager.markMDirty();\n//\tshaders::ShaderProgram *shader = rManager.useShader(SHADER_solidColor);\n//\n//\tGLint vploc = shader->getShaderLocation(false, SHADERVAR_vertex_position);\n//\tGLint loc = shader->getShaderLocation(true, SHADER_solidColor_solidColor);\n//\n//\t\n//\tfor(int i=0;i<this->numNodes;i++)\n//\t//for(std::vector<struct PathNode *>::iterator it = this->nodes.begin(); it != this->nodes.end(); it++)\n//\t{\n//\t\t//struct PathNode *node = *it;\n//\t\tPathNode &node = this->nodes[i];\n//\t\t// Draw a point for the node\n//\t\tif(node.current)\n//\t\t\tglUniform4f(loc, 1.0f, 0.f, 0.f, 1.f);\n//\t\telse if(node.open)\n//\t\t\tglUniform4f(loc, 0.0f, 1.f, 0.f, 1.f);\n//\t\telse if(node.closed)\n//\t\t\tglUniform4f(loc, 1.0f, 1.f, 0.f, 1.f);\n//\t\telse\n//\t\t\tcontinue;\n//\t\t\t//glUniform4f(loc, 0.0f, 0.f, 1.f, 1.f);\n//\t\trManager.M = glm::translate(glm::mat4(1.0f), node.position);\n//\t\trManager.markMDirty();\n//\t\trManager.setShaderMatricies(*shader);\n//\t\tBasicShapes::drawPoint(4,vploc);\n//\n//\t\t// Draw all node links\n//\t\trManager.M = glm::mat4(1.0f);\n//\t\trManager.markMDirty();\n//\t\trManager.setShaderMatricies(*shader);\n//\t\tfor(std::vector<struct PathNodeLink *>::iterator j = node.links.begin(); j != node.links.end(); j++)\n//\t\t{\n//\t\t\tstruct PathNodeLink *nodeLnk = *j;\n//\t\t\tif(nodeLnk->a->current && nodeLnk->b->current)\n//\t\t\t\tglUniform4f(loc, 1.0f, 0.f, 0.f, 1.f);\n//\t\t\telse if((nodeLnk->a->closed && (nodeLnk->b->current || nodeLnk->b->closed)) || (nodeLnk->b->closed && (nodeLnk->a->current || nodeLnk->a->closed)))\n//\t\t\t\tglUniform4f(loc, 1.0f, 1.f, 0.f, 1.f);\n//\t\t\telse if((nodeLnk->a->open && nodeLnk->b->closed) || (nodeLnk->b->open && nodeLnk->a->closed))\n//\t\t\t\tglUniform4f(loc, 0.0f, 1.f, 0.f, 1.f);\n//\t\t\telse if(\n//\t\t\t\t((nodeLnk->a->current || nodeLnk->a->open || nodeLnk->a->closed) && (!nodeLnk->b->current && !nodeLnk->b->open && !nodeLnk->b->closed)) || \n//\t\t\t\t((nodeLnk->b->current || nodeLnk->b->open || nodeLnk->b->closed) && (!nodeLnk->a->current && !nodeLnk->a->open && !nodeLnk->a->closed))\n//\t\t\t )\n//\t\t\t\tglUniform4f(loc, 0.0f, 0.f, 1.f, 1.f);\n//\t\t\telse\n//\t\t\t\tcontinue;\n//\t\t\t//{\n//\t\t\t//\tif(nodeLnk->a->closed && nodeLnk->b->closed)\n//\t\t\t//\t\tglUniform4f(loc, 1.0f, 1.f, 0.f, 1.f);\n//\t\t\t//\telse if(nodeLnk->a->open && nodeLnk->b->open)\n//\t\t\t//\t\tglUniform4f(loc, 0.0f, 1.f, 0.f, 1.f);\n//\t\t\t//\telse\n//\t\t\t//\t\tglUniform4f(loc, 0.0f, 0.f, 1.f, 1.f);\n//\t\t\t//}\n//\t\t\tBasicShapes::drawLine(nodeLnk->a->position, nodeLnk->b->position, vploc);\n//\t\t}\n//\t}\n//\tglDisable(GL_BLEND);\n}\nPathNode *NavigationGraph::getRandomNode(int group)\n{\n\tint r = std::rand()%this->groupCounts[group];\n\tint c = 0;\n\tfor(unsigned int i=0;i<this->numNodes;i++)\n\t{\n\t\tif(this->groupMasks[i]&(1<<group))\n\t\t{\n\t\t\tif(c==r)\n\t\t\t\treturn &this->nodes[i];\n\t\t\tc++;\n\t\t}\n\t}\n\treturn 0;\n}\nPathNode *NavigationGraph::getNearestPathNode(glm::vec3 position, int mask)\n{\n\tPathNode *finalPathNode = 0;\n\tfloat min = std::numeric_limits<float>::max();\n\tfor(unsigned int i=0;i<this->numNodes;i++)\n\t{\n\t\tPathNode *n = &this->nodes[i];\n\t\tif(this->groupMasks[i]&mask)\n\t\t{\n\t\t\tfloat f = glm::length2(n->position-position);\n\t\t\tif(min>f)\n\t\t\t{\n\t\t\t\tfinalPathNode = n;\n\t\t\t\tmin = f;\n\t\t\t}\n\t\t}\n\t}\n\treturn finalPathNode;\n}\nPathNode *NavigationGraph::getNearestPathNode(glm::vec3 
position)\n{\n\tPathNode *finalPathNode = 0;\n\tfloat min = std::numeric_limits<float>::max();\n\tfor(unsigned int i=0;i<this->numNodes;i++)\n\t{\n\t\tPathNode *n = &this->nodes[i];\n\t\tfloat f = glm::length2(n->position-position);\n\t\tif(min>f)\n\t\t{\n\t\t\tfinalPathNode = n;\n\t\t\tmin = f;\n\t\t}\n\t}\n\treturn finalPathNode;\n}\nPathNode *NavigationGraph::getNearestPathNodeFromPool(glm::vec3 position, std::set<int> &pool)\n{\n\tPathNode *finalPathNode = 0;\n\tfloat min = std::numeric_limits<float>::max();\n\tfor(unsigned int i=0;i<this->numNodes;i++)\n\t{\n\t\tPathNode *n = &this->nodes[i];\n\t\tif(pool.find(n->id)!=pool.end())\n\t\t{\n\t\t\tfloat f = glm::length2(n->position-position);\n\t\t\tif(min>f)\n\t\t\t{\n\t\t\t\tfinalPathNode = n;\n\t\t\t\tmin = f;\n\t\t\t}\n\t\t}\n\t}\n\treturn finalPathNode;\n}\nint NavigationGraph::getPathNodeGroupMask(int id)\n{\n\treturn this->groupMasks[id];\n}\n" }, { "alpha_fraction": 0.7850467562675476, "alphanum_fraction": 0.7850467562675476, "avg_line_length": 16.83333396911621, "blob_id": "68d189f57bab797ed53c8938c483e08e5620cf37", "content_id": "a8cad5b72e4c0d087acae050a885625b944b4d5b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 107, "license_type": "no_license", "max_line_length": 32, "num_lines": 6, "path": "/src/cpp/util/DebugFlags.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_UTIL_DEBUGFLAGS_HPP_\n#define FYP_UTIL_DEBUGFLAGS_HPP_\n\n#define ENABLE_DEBUG_PATHRENDER\n\n#endif\n" }, { "alpha_fraction": 0.6546924710273743, "alphanum_fraction": 0.6708633899688721, "avg_line_length": 28.19898796081543, "blob_id": "9e9318db9b80b220fcbf80809ffa16fb3461d700", "content_id": "76ed0bb826db628a8c0e221c55b977249597e853", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 17315, "license_type": "no_license", "max_line_length": 165, "num_lines": 593, "path": "/src/cpp/world/World.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <cmath>\n#include <iostream>\n#include <glm/glm.hpp>\n#include <glm/vec3.hpp>\n#include <glm/gtc/matrix_transform.hpp>\n\n#include \"util/gl.h\"\n#include \"util/DeltaTime.hpp\"\n#include \"util/AssetManager.hpp\"\n#include \"util/Boundaries.hpp\"\n#include \"util/QuaternionUtils.hpp\"\n\n#include \"render/BasicShapes.hpp\"\n#include \"render/shaders/ShaderUtils.hpp\"\n#include \"render/RenderManager.hpp\"\n#include \"render/StaticModel.hpp\"\n#include \"render/SkeletalModel.hpp\"\n#include \"render/SkeletalAnimation.hpp\"\n\n#include \"world/entities/Enemy.hpp\"\n#include \"world/entities/Player.hpp\"\n#include \"world/entities/SecurityCamera.hpp\"\n#include \"world/entities/Turret.hpp\"\n\n#include \"input/controls/PlayerGameControls.hpp\"\n\n//debug\n#include \"input/controls/DebugControls.hpp\"\n#include \"world/collisions/StaticMesh.hpp\"\n#include <glm/gtx/quaternion.hpp>\n#include \"ai/path/PathFinder.hpp\"\n//debug end\n\n#include \"World.hpp\"\n\nusing namespace util;\nusing namespace world;\nusing namespace entities;\nusing namespace render;\nusing namespace util::Boundaries;\nusing namespace ai::path;\n\nAABB aabb;\n\n//debug\nfloat distance = 5.f;\nfloat size = 0.5f;\nbool debug_renderEntityMarkers = false;\nbool debug_renderEntityBounds = false;\nint debug_renderWireframe = 0;\nbool debug_renderNavGraph = false;\nPathNode *a=0,*b=0;\nPathFinder *f = new PathFinder();\nglm::vec3 debug_point;\nglm::vec3 
debug_point2,debug_point3;\n//dbend\n\nWorld::World(screens::GameView *gameView)\n{\n\tthis->gameView = gameView;\n\tthis->world_interactive_model = (render::StaticModel *)util::AssetManager::getAssetManager()->getAsset(ASSET_WORLD_INTERACTIVE_OBJ);\n\tthis->world_aesthetic_model = 0;\n\tthis->world_skybox = (render::StaticModel *)util::AssetManager::getAssetManager()->getAsset(ASSET_SKYBOX_OBJ);\n\tthis->world_interactive_collision = new collisions::StaticMesh();\n\tthis->world_navigation_graph = (NavigationGraph *)util::AssetManager::getAssetManager()->getAsset(ASSET_WORLD_NAV_OBJ);\n\n\tthis->selector = SELECTOR_OFF;\n\tthis->enableSelector = true;\n\tthis->selectorCamera = new SecurityCamera();\n\tthis->selectorTurret = new Turret();\n\n\tthis->entities.clear();\n\tthis->spawningEntities.clear();\n\t\n\tthis->player = new Player();\n\tthis->enemy = new Enemy(this);\n\t\n\tthis->controlScheme = new controls::PlayerGameControls((Entity *)this->player, this->world_interactive_collision);\n\t\n\tthis->vertAngle = 0;\n\tthis->horizAngle = 0;\n\tthis->lastX = 0;\n\tthis->lastY = 0;\n\tthis->selectorYaw = 0.f;\n\t\n\tthis->world_interactive_collision->addStaticModel(this->world_interactive_model);\n\t\n\t\n\tthis->selectorCamera->selector = true;\n\tthis->selectorTurret->selector = true;\n\t\n\tthis->add(this->selectorCamera, glm::vec3(20,0,5));\n\tthis->add(this->selectorTurret, glm::vec3(20,0,5));\n\tthis->add(this->player, glm::vec3(0,-20,2));\n\tthis->add(this->enemy, glm::vec3(0,-10,0));\n\t\n\tthis->remainingTurrets = 3;\n\tthis->remainingCameras = 5;\n\n\tstd::ifstream wf(\"./player.dat\");\n\tif(wf.is_open())\n\t{\n\t\tglm::vec3 a;\n\t\tglm::quat q;\n\t\twf.read((char *)&a, sizeof(glm::vec3));\n\t\twf.read((char *)&q, sizeof(glm::quat));\n\t\twf.close();\n\t\tthis->player->setPosition(a);\n\t\tthis->player->setOrientation(q);\n\t}\n}\nWorld::~World()\n{\n\tstd::ofstream wf(\"./player.dat\");\n\tglm::vec3 p = this->player->getPosition();\n\twf.write((char *)&p, sizeof(glm::vec3));\n\tglm::quat q = this->player->getOrientation();\n\twf.write((char *)&q, sizeof(glm::quat));\n\twf.close();\n\tdelete this->world_interactive_collision;\n\tdelete this->controlScheme;\n\tfor(auto ent : this->spawningEntities)\n\t\tdelete ent;\n\tfor(auto ent : this->entities)\n\t\tdelete ent;\n}\nvoid World::add(Entity *ent, glm::vec3 location)\n{\n\tent->setPosition(location);\n\tthis->spawningEntities.push_back(ent);\n\t//ent->world = this;\n\t//this->entities.push_back(ent);\n}\nutil::DeltaTime *debug_deltaTime;\nvoid World::tick(util::DeltaTime &deltaTime, bool surface)\n{\n\tdebug_deltaTime = &deltaTime;\n\tif(surface)\n\t{\n\t\t//this->enemy->setPosition(glm::vec3(std::sin(deltaTime.getTime()/2)*10, 3, 0));\n\t\t//this->camera->setPosition(glm::vec3(std::sin(deltaTime.getTime())*4, std::cos(deltaTime.getTime())*4, std::sin(deltaTime.getTime()/2+M_PI/2)*5+10));\n\t\t//this->camera->setOrientation(glm::quat(glm::vec3(deltaTime.getTime(), 0, 0)));\n\t\t//this->controlScheme->tick(deltaTime);\n\t\tfor(Entity *ent : this->entities)\n\t\t{\n\t\t\tent->tick(deltaTime);\n\t\t}\n\t\tEntity *ent;\n\t\tfor(std::list<Entity *>::iterator it = this->spawningEntities.begin(); it!=this->spawningEntities.end();)\n\t\t{\n\t\t\tent = *it;\n\t\t\tit = this->spawningEntities.erase(it);\n\t\t\tent->addedToWorld(this, deltaTime.getTime());\n\t\t\tthis->entities.push_back(ent);\n\t\t}\n\t\t((controls::PlayerGameControls *)this->controlScheme)->tick2(0, *debug_deltaTime);\n\t\tthis->viewDirection = 
this->player->getOrientation()*glm::vec3( 0, 1, 0);\n\t\tthis->viewUp = this->player->getOrientation()*glm::vec3(0, 0, 1);\n\t}\n\tif(this->enableSelector)\n\t{\n\t\tRaycast raycast;\n\t\traycast.origin = this->player->getPosition();\n\t\traycast.direction = this->viewDirection;\n\t\traycast.maxDistance = 8;\n\t\tRaycastResult result = this->world_interactive_collision->rayCast(raycast);\n\t\tthis->selectorRemove = 0;\n\n\t\t//for(auto e : this->entities)\n\t\t//{\n\t\t//\tif(e!=this->selectorTurret && dynamic_cast<Turret *>(e)!=0)\n\t\t//\t{\n\t\t//\t\tAABB aabb = e->getBounds()->translate(e->getPosition());\n\t\t//\t\tif(aabb.rayCastCheck(raycast))\n\t\t//\t\t{\n\t\t//\t\t\tthis->selector = SELECTOR_REMOVE;\n\t\t//\t\t\tthis->selectorRemove = e;\n\t\t//\t\t\tthis->selectorCamera->doRender = false;\n\t\t//\t\t\tthis->selectorTurret->doRender = false;\n\t\t//\t\t}\n\t\t//\t}\n\t\t//\tif(e!=this->selectorCamera && dynamic_cast<SecurityCamera *>(e)!=0)\n\t\t//\t{\n\t\t//\t\tAABB aabb = e->getBounds()->translate(e->getPosition());\n\t\t//\t\tif(aabb.rayCastCheck(raycast))\n\t\t//\t\t{\n\t\t//\t\t\tthis->selector = SELECTOR_REMOVE;\n\t\t//\t\t\tthis->selectorRemove = e;\n\t\t//\t\t\tthis->selectorCamera->doRender = false;\n\t\t//\t\t\tthis->selectorTurret->doRender = false;\n\t\t//\t\t}\n\t\t//\t}\n\t\t//}\n\t\tif(result.hit)\n\t\t{\n\t\t\traycast.maxDistance = result.distance;\n\t\t\tglm::vec3 rayHit = this->player->getPosition()+(this->viewDirection*result.distance);\n\t\t\t//std::cout << \"RAY: \" << result.distance << \"hitNormal=(\" << result.hitNormal.x << \", \" << result.hitNormal.y << \", \" << result.hitNormal.z << \") \" << std::endl;\n\t\t\tdouble pitch = -std::atan2(std::sqrt(result.hitNormal.x*result.hitNormal.x+result.hitNormal.y*result.hitNormal.y), result.hitNormal.z)+glm::radians(90.f);\n\t\t\tAABB aabb;\n\t\t\tif(pitch>=M_PI/2-0.01f)\n\t\t\t{\n\t\t\t\tthis->selector = SELECTOR_TURRET;\n\t\t\t\tthis->selectorCamera->doRender = false;\n\t\t\t\tthis->selectorTurret->doRender = true;\n\t\t\t\tthis->selectorTurret->setPosition(rayHit);\n\t\t\t\taabb = this->selectorTurret->getBounds()->translate(this->selectorTurret->getPosition());\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tthis->selector = SELECTOR_SECURITY_CAMERA;\n\t\t\t\tthis->selectorCamera->doRender = true;\n\t\t\t\tthis->selectorTurret->doRender = false;\n\t\t\t\tif(pitch>=-M_PI/2+0.01f)\n\t\t\t\t\tthis->selectorYaw = -std::atan2(result.hitNormal.x, result.hitNormal.y);\n\t\t\t\tthis->selectorCamera->setOrientation(glm::quat(glm::vec3(pitch, 0, this->selectorYaw)));\n\t\t\t\tthis->selectorCamera->setPosition(rayHit);\n\t\t\t\taabb = this->selectorCamera->getBounds()->translate(this->selectorCamera->getPosition());\n\t\t\t}\n\t\t\tAABB *_eaabb,eaabb;\n\t\t\tfor(auto e : this->entities)\n\t\t\t{\n\t\t\t\tif(e==this->selectorTurret || e==this->selectorCamera)\n\t\t\t\t\tcontinue;\n\t\t\t\t_eaabb = e->getBounds();\n\t\t\t\tif(_eaabb==0)\n\t\t\t\t\tcontinue;\n\t\t\t\teaabb = _eaabb->translate(e->getPosition());\n\t\t\t\tif(dynamic_cast<Turret *>(e)!=0 || dynamic_cast<SecurityCamera *>(e)!=0)\n\t\t\t\t{\n\t\t\t\t\tif(eaabb.rayCastCheck(raycast) || eaabb.checkIntersect(aabb))\n\t\t\t\t\t{\n\t\t\t\t\t\tthis->selector = SELECTOR_REMOVE;\n\t\t\t\t\t\tthis->selectorCamera->doRender = false;\n\t\t\t\t\t\tthis->selectorTurret->doRender = false;\n\t\t\t\t\t\tthis->selectorRemove = e;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\telse if((e!=this->player && eaabb.rayCastCheck(raycast)) || eaabb.checkIntersect(aabb))\n\t\t\t\t{\n\t\t\t\t\tthis->selector 
= SELECTOR_OFF;\n\t\t\t\t\tthis->selectorCamera->doRender = false;\n\t\t\t\t\tthis->selectorTurret->doRender = false;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\telse\n\t\t{\n\t\t\tthis->selector = SELECTOR_OFF;\n\t\t\tthis->selectorCamera->doRender = false;\n\t\t\tthis->selectorTurret->doRender = false;\n\t\t}\n\t}\n\t///////////////////\n\t// DEBUG TICKING //\n\t///////////////////\n\t\n\tRaycast raycast;\n\traycast.origin = this->player->getPosition();\n\traycast.direction = this->viewDirection;\n\traycast.maxDistance = 0;\n\tRaycastResult result = this->world_interactive_collision->rayCast(raycast),*_result;\n\tif(result.hit)\n\t\traycast.maxDistance = result.distance;\n\tAABB *_eaabb,eaabb;\n\tfor(auto e : this->entities)\n\t{\n\t\tif(e==this->selectorTurret || e==this->selectorCamera || e==this->player)\n\t\t\tcontinue;\n\t\t_eaabb = e->getBounds();\n\t\tif(_eaabb==0)\n\t\t\tcontinue;\n\t\teaabb = _eaabb->translate(e->getPosition());\n\t\t_result = eaabb.rayCast(raycast);\n\t\tif(_result!=0)\n\t\t{\n\t\t\tif(_result->distance<result.distance)\n\t\t\t\tresult = *_result;\n\t\t\tdelete _result;\n\t\t}\n\t}\n\t\n\tif(result.hit)\n\t{\n\t\tglm::vec3 boxCent = this->player->getPosition()+this->viewDirection*result.distance;\n\t\tdebug_point = boxCent;\n\t\taabb.boxCenter[0] = boxCent.x;\n\t\taabb.boxCenter[1] = boxCent.y;\n\t\taabb.boxCenter[2] = boxCent.z;\n\t\taabb.boxHalfSize[0] = size/2;\n\t\taabb.boxHalfSize[1] = size/2;\n\t\taabb.boxHalfSize[2] = size/2;\n\t}\n}\ndouble d = 0;\nvoid World::render3D(render::RenderManager &rManager, bool isSurface)\n{\n\t// Setup rendering\n\trManager.enableCullFace();\n\t\n\t// Setup skybox view matrix\n\trManager.V = glm::lookAt(\n\t\t\tglm::vec3(0,0,1.f),\n\t\t\tglm::vec3(0,0,1.f)+viewDirection,\n\t\t\tviewUp\n\t\t);\n\trManager.M = glm::mat4(1.0f), glm::vec3(1,1,1);\n\trManager.markVDirty();\n\trManager.markMDirty();\n\tglCullFace(GL_FRONT);\n\tthis->world_skybox->render(rManager, SHADER_UVTest);\n\tglCullFace(GL_BACK);\n\t\n\t// Setup view matrix\n\trManager.V = glm::lookAt(\n\t\t\tthis->player->getPosition(),\n\t\t\tthis->player->getPosition() + viewDirection,\n\t\t\tviewUp\n\t\t);\n\trManager.markVDirty();\n\n\trManager.enableDepth();\n\n\t\n\t// Render the world model\n\trManager.M = glm::mat4(1.0f);\n\trManager.markMDirty();\n\tthis->world_interactive_model->render(rManager, SHADER_UVTest);\n\n\tfor(Entity *ent : this->entities)\n\t{\n\t\t//if(this->selector!=SELECTOR_SECURITY_CAMERA && this->selectorCamera==ent)\n\t\t//\tcontinue;\n\t\t//if(this->selector!=SELECTOR_TURRET && this->selectorTurret==ent)\n\t\t//\tcontinue;\n\t\t//if(ent==this->player)\n\t\t//\tcontinue;\n\t\tif(ent->doRender)\n\t\t\tent->render(rManager);\n\t}\n\t\n\t/////////////////////\n\t// DEBUG RENDERING //\n\t/////////////////////\n\n\trManager.disableDepth();\n\trManager.disableCullFace();\n\n\n\n\tif(debug_renderEntityMarkers || debug_renderEntityBounds)\n\t{\n\t\tfor(auto ent : this->entities)\n\t\t{\n\t\t\tif(ent->doRender)\n\t\t\t\tent->renderDebug(rManager, debug_renderEntityMarkers, debug_renderEntityBounds);\n\t\t}\n\t}\n\tif(debug_renderEntityMarkers)\n\t{\n\t\t// Draw world origin marker\n\t\trManager.renderDirectionVector(glm::vec3(0,0,0), glm::vec3(10, 0, 0), glm::vec4(1.f, 0.f, 0.f, 1.f));\n\t\trManager.renderDirectionVector(glm::vec3(0,0,0), glm::vec3( 0,10, 0), glm::vec4(0.f, 1.f, 0.f, 1.f));\n\t\trManager.renderDirectionVector(glm::vec3(0,0,0), glm::vec3( 0, 0,10), glm::vec4(0.f, 0.f, 1.f, 1.f));\n\t\t//rManager.renderOrientation(this->player->getPosition(), 
this->player->getOrientation());\n\t\t//rManager.renderDirectionVector(debug_point, debug_point2-debug_point, glm::vec4(1.0f, 0.f, 0.f, 1.0f));\n\t\trManager.renderDirectionVector(glm::vec3(), debug_point, glm::vec4(1.0f, 0.f, 0.f, 1.0f));\n\t\trManager.renderDirectionVector(glm::vec3(), debug_point2, glm::vec4(0.0f, 1.f, 0.f, 1.0f));\n\t\trManager.renderDirectionVector(glm::vec3(), debug_point3, glm::vec4(0.0f, 0.f, 1.f, 1.0f));\n\t}\n\t// Render bounds of entities\n\tif(debug_renderEntityBounds)\n\t{\n\t\taabb.render(rManager, glm::vec4(1.0f, 0.0f, 0.0f, 0.8f), false);\n\t}\n\t// Wireframe\n\tif(debug_renderWireframe==1)\n\t{\n\t\tglPolygonMode(GL_FRONT_AND_BACK, GL_LINE);\n\t}\n\telse if(debug_renderWireframe==2)\n\t{\n\t\trManager.M = glm::mat4(1.0f);\n\t\trManager.markMDirty();\n\t\tglPolygonMode(GL_FRONT_AND_BACK, GL_LINE);\n\t\tthis->world_interactive_model->render(rManager, SHADER_UVTest);\n\t\tglPolygonMode(GL_FRONT_AND_BACK, GL_FILL);\n\t}\n\telse\n\t{\n\t\tglPolygonMode(GL_FRONT_AND_BACK, GL_FILL);\n\t}\n\t// Navgraph\n\tif(debug_renderNavGraph)\n\t\tthis->world_navigation_graph->render(rManager);\n\n\t//rManager.renderDirectionVector(this->selectorCamera->getPosition(), debug_rightVector, glm::vec4(0.f,1.f,1.f,1.f));\n\t//rManager.renderDirectionVector(this->selectorCamera->getPosition(), debug_relUpVector, glm::vec4(1.f,0.f,1.f,1.f));\n\t//rManager.renderDirectionVector(this->selectorCamera->getPosition(), debug_hitNormal, glm::vec4(1.f,1.f,0.f,1.f));\n\n\t//if(isSurface)\n\t\t//((controls::PlayerGameControls *)this->controlScheme)->tick2(rManager, *debug_deltaTime);\n\n\n\t//glm::vec3 boxCent;\n\n\t//world_model->getBounds().render(rManager, glm::vec4(0.f, 0.f, 1.f, 1.f), false);\n\t//sm->render(rManager, raycast);\n\t//sm->render(rManager, aabb);\n\tdouble j = glfwGetTime();\n\tif(f!=0 && !f->done)// && j>d)\n\t{\n\t\td = j;\n\t\tif(f->tick(5))\n\t\t{\n\t\t\tPRINT_DEBUG(\"DONE\");\n\t\t\tif(f->sucess)\n\t\t\t\tthis->enemy->setPath(f->getPath());\n\t\t}\n\t}\n\telse\n\t{\n\t\t/*\n\t\tfor(int i=0;i<this->world_navigation_graph->numNodes;i++)\n\t\t{\n\t\t\tPathNode *n = &this->world_navigation_graph->nodes[i];\n\t\t\tif(aabb.checkInside(n->position))\n\t\t\t\tn->current = true;\n\t\t\telse\n\t\t\t\tn->current = false;\n\t\t}\n\t\t*/\n\t}\n}\nvoid World::render2D(render::RenderManager &rManager, bool isSurface)\n{\n\t\n}\nstd::list<world::Entity *> *World::getEntities()\n{\n\treturn &this->entities;\n}\nutil::Boundaries::RaycastResult World::rayCast(util::Boundaries::Raycast &raycast, world::Entity **entity)\n{\n\tEntity *skipEnt = *entity;\n\t*entity = 0;\n\tRaycastResult result = this->world_interactive_collision->rayCast(raycast);\n\tRaycastResult *_result;\n\tif(result.hit)\n\t\traycast.maxDistance = result.distance;\n\tAABB *_eaabb,eaabb;\n\tfor(auto e : this->entities)\n\t{\n\t\tif(e==this->selectorTurret || e==this->selectorCamera || e==skipEnt)\n\t\t\tcontinue;\n\t\t_eaabb = e->getBounds();\n\t\tif(_eaabb==0)\n\t\t\tcontinue;\n\t\teaabb = _eaabb->translate(e->getPosition());\n\t\t_result = eaabb.rayCast(raycast);\n\t\tif(_result!=0)\n\t\t{\n\t\t\tif(_result->distance<result.distance)\n\t\t\t{\n\t\t\t\t*entity = e;\n\t\t\t\tresult = *_result;\n\t\t\t}\n\t\t\tdelete _result;\n\t\t}\n\t}\n\treturn result;\n}\nvoid World::onDebugControl(Control control, int action)\n{\n\t//if(action!=CONTROL_KEYACTION_REPEAT)\n\t//\tPRINT_CONTROL(\"A\", control, action);\n\n\t// Debug config\n\tif(action==CONTROL_KEYACTION_RELEASE)\n\t{\n\t\tif(control==GLFW_KEY_F4) 
this->gameOver(GAME_OVER_ENEMY_DESTROYED);\n\t\tif(control==GLFW_KEY_F5) debug_renderEntityMarkers = !debug_renderEntityMarkers;\n\t\tif(control==GLFW_KEY_F6) debug_renderEntityBounds = !debug_renderEntityBounds;\n\t\tif(control==GLFW_KEY_F7) debug_renderWireframe++;\n\t\tif(control==GLFW_KEY_F8) debug_renderNavGraph = !debug_renderNavGraph;\n\t\tif(control==GLFW_KEY_T)\n\t\t{\n\t\t\tthis->player->setPosition(glm::vec3(0,-20,2));\n\t\t\tthis->player->setOrientation(glm::quat());\n\t\t}\n\t\tif(control==GLFW_KEY_Y)\n\t\t{\n\t\t\tthis->player->setPosition(glm::vec3(0,-20,20));\n\t\t\tthis->player->setOrientation(glm::quat());\n\t\t}\n\t\tif(debug_renderWireframe>2)\n\t\t\tdebug_renderWireframe = 0;\n\t\tif(control==GLFW_KEY_O)\n\t\t{\n\t\t\tfor(unsigned int i=0;i<this->world_navigation_graph->numNodes;i++)\n\t\t\t{\n\t\t\t\tPathNode *n = &this->world_navigation_graph->nodes[i];\n\t\t\t\tif(aabb.checkInside(n->position))\n\t\t\t\t{\n\t\t\t\t\tPRINT_DEBUG(\"node:\" << i);\n\t\t\t\t\tn->current = true;\n\t\t\t\t\tif(a==0)\n\t\t\t\t\t\ta = n;\n\t\t\t\t\telse\n\t\t\t\t\t\tb = n;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n//13,84\n\t\tif(control==GLFW_KEY_I)\n\t\t{\n\t\t\tPRINT_DEBUG(\"Setting default\");\n\t\t\ta = &this->world_navigation_graph->nodes[0];\n\t\t\tb = &this->world_navigation_graph->nodes[1];\n\t\t\ta->current = true;\n\t\t\tb->current = true;\n\t\t}\n\t\tif(control==GLFW_KEY_P)\n\t\t{\n\t\t\tPRINT_DEBUG(\"Clearing navgraph\");\n\t\t\ta = 0;\n\t\t\tb = 0;\n\t\t\tfor(unsigned int i=0;i<this->world_navigation_graph->numNodes;i++)\n\t\t\t{\n\t\t\t\tPathNode *n = &this->world_navigation_graph->nodes[i];\n\t\t\t\tn->current = false;\n\t\t\t\tn->closed = false;\n\t\t\t\tn->open = false;\n\t\t\t}\n\t\t}\n\t\tif(control==GLFW_KEY_LEFT_BRACKET)\n\t\t{\n\t\t\tPRINT_DEBUG(\"Running path\");\n\t\t\tif(a!=0 && b!=0)\n\t\t\t{\n\t\t\t\ta->current = false;\n\t\t\t\tb->current = false;\n\t\t\t\tf->start(a,b);\n\t\t\t\tPRINT_DEBUG(\"START\");\n\t\t\t}\n\t\t}\n\t}\n\n\tif(control==GLFW_MOUSE_BUTTON_2 && action==CONTROL_MOUSEBUTTONACTION_RELEASE)\n\t{\n\t\tthis->enableSelector = true;\n\t}\n\tif(action==CONTROL_MOUSEBUTTONACTION_PRESS)\n\t{\n\t\tif(this->selector==SELECTOR_SECURITY_CAMERA)\n\t\t{\n\t\t\tSecurityCamera *cam = new SecurityCamera();\n\t\t\tcam->setOrientation(this->selectorCamera->getOrientation());\n\t\t\tthis->add(cam, this->selectorCamera->getPosition());\n\t\t\tthis->enableSelector = false;\n\t\t\tthis->selector = SELECTOR_OFF;\n\t\t\tthis->selectorCamera->doRender = false;\n\t\t\tthis->selectorTurret->doRender = false;\n\t\t}\n\t\telse if(this->selector==SELECTOR_TURRET)\n\t\t{\n\t\t\tTurret *turret = new Turret();\n\t\t\tturret->setOrientation(this->selectorTurret->getOrientation());\n\t\t\tthis->add(turret, this->selectorTurret->getPosition());\n\t\t\tthis->enableSelector = false;\n\t\t\tthis->selector = SELECTOR_OFF;\n\t\t\tthis->selectorCamera->doRender = false;\n\t\t\tthis->selectorTurret->doRender = false;\n\t\t}\n\t\telse if(this->selector==SELECTOR_REMOVE)\n\t\t{\n\t\t\tthis->entities.remove(this->selectorRemove);\n\t\t\tdelete this->selectorRemove;\n\t\t\tthis->selectorRemove = 0;\n\t\t\tthis->selectorCamera->doRender = false;\n\t\t\tthis->selectorTurret->doRender = false;\n\t\t}\n\t}\n}\nvoid World::onRayHit(glm::vec3 rayOrigin, float distance, glm::vec3 normal)\n{\n\t\n}\nvoid World::gameOver(int type)\n{\n\tPRINT_DEBUG(\"GAME OVER:\" << type);\n\tthis->gameView->onGameOver(type);\n}\n" }, { "alpha_fraction": 0.6915460824966431, "alphanum_fraction": 0.698400616645813, "avg_line_length": 
18.893939971923828, "blob_id": "0e6e20a1aef3b6917eb3440fe7f38526ea274446", "content_id": "b936f4090c5a89be55c894ade0c54f4d9c5d8de3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1313, "license_type": "no_license", "max_line_length": 70, "num_lines": 66, "path": "/src/cpp/util/AssetManager.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_UTIL_ASSETMANAGER_HPP_\n#define FYP_UTIL_ASSETMANAGER_HPP_\n\nnamespace util {\n\tclass AssetManager;\n\tclass Asset;\n}\nnamespace render {\n\tclass SkeletalModel;\n}\n\n\n#define ASSET_MTLLIB 0\n#define ASSET_WAVEFRONT 1\n#define ASSET_MD5MESH 2\n#define ASSET_MD5ANIM 3\n#define ASSET_DDS 4\n#define ASSET_NAVMESH 5\n\n#include <mutex>\n#include <list>\n#include <thread>\n#include <vector>\n#include <ostream>\n#include \"AssetsMeta.h\"\n\nstd::ostream &operator<<(std::ostream &ost, const util::Asset &asset);\n\nnamespace util {\n\tclass Asset {\n\t\tpublic:\n\t\t\tAsset(int assetId);\n\t\t\tvirtual ~Asset();\n\t\t\tint getAssetID() const;\n\t\t\tstd::string getName() const;\n\t\t\tvirtual void write(std::ostream &ost) const;\n\t\t\tvirtual void postload() = 0;\n\t\tprivate:\n\t\t\tint _assetId;\n\t\t\tstd::string name;\n\t\tprotected:\n\t\t\tvoid setName(std::string name);\n\t};\n\tclass AssetManager {\n\t\tpublic:\n\t\t\tAssetManager();\n\t\t\t~AssetManager();\n\t\t\tstatic AssetManager *getAssetManager();\n\t\t\tvoid init();\n\t\t\tvoid cleanup();\n\t\t\tvoid run();\n\t\t\tbool postload();\n\t\t\tfloat getProgress();\n\t\t\tAsset *getAsset(int assetId);\n\t\tprivate:\n\t\t\tstatic AssetManager *instance;\n\t\t\tint progress_current;\n\t\t\tint progress_total;\n\t\t\tstd::thread *assetManagerThread;\n\t\t\tstd::mutex progress_mutex;\n\t\t\tAsset *assets[ASSETS_COUNT];\n\t\t\tbool preload_complete,postload_complete;\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.7272727489471436, "alphanum_fraction": 0.7272727489471436, "avg_line_length": 10, "blob_id": "a9715e4c75f05dc2d0a3be50895f062724fa2714", "content_id": "facdd6316489c9c628708d8e5dbc7a295bca1d09", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 110, "license_type": "no_license", "max_line_length": 33, "num_lines": 10, "path": "/src/cpp/gui/elements/ControlsOption.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"ControlsOption.hpp\"\n\nControlsOption::ControlsOption()\n{\n\t\n}\nControlsOption::~ControlsOption()\n{\n\t\n}\n" }, { "alpha_fraction": 0.7210963368415833, "alphanum_fraction": 0.725921630859375, "avg_line_length": 32.86274337768555, "blob_id": "0ead986770f49ee09b68ce114a7ea59093e07db7", "content_id": "718bd51afeaf8f3ecf9026459eb648f0b4318b53", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5181, "license_type": "no_license", "max_line_length": 265, "num_lines": 153, "path": "/src/cpp/render/shaders/ShaderProgram.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/Globals.hpp\"\n#include <iostream>\n\n#include \"ShaderProgram.hpp\"\n\nusing namespace render;\nusing namespace shaders;\n\nGLint *ShaderProgram::shader_files = 0;\nShaderProgram **ShaderProgram::shader_program = 0;\n\nShaderProgram::ShaderProgram(int shaderprogram_id, std::string shaderprogram_name, std::vector<GLint> shaderFiles, ShaderMask shaderVar_mask, ShaderMask shaderVar_overrideMask, std::vector<std::string> shaderVar_overrides, 
std::vector<std::string> shaderVar_custom)\n{\n\tthis->shaderprogram_id = shaderprogram_id;\n\tthis->shaderprogram_name = shaderprogram_name;\n\tthis->shaderFiles = shaderFiles;\n\tthis->shaderVar_mask = shaderVar_mask;\n\tthis->shaderVar_overrideMask = shaderVar_overrideMask;\n\tthis->shaderVar_overrides = shaderVar_overrides;\n\tthis->shaderVar_custom = shaderVar_custom;\n\n\tprogramID = glCreateProgram();\n\tfor(auto shader : shaderFiles)\n\t\tglAttachShader(programID, shader);\n\tglLinkProgram(programID);\n\t\n\tGLint compileStatus,infoLogLength;\n\tglGetProgramiv(programID, GL_LINK_STATUS, &compileStatus);\n\tglGetProgramiv(programID, GL_INFO_LOG_LENGTH, &infoLogLength);\n\tif(compileStatus==GL_FALSE)\n\t{\n\t\tchar *infoLog = new char[infoLogLength+1];\n\t\tglGetProgramInfoLog(programID, infoLogLength, NULL, infoLog);\n\t\tinfoLog[infoLogLength] = 0;\n\t\tstd::cerr << \"Failed to link program: \" << shaderprogram_name << \"\\n\" << infoLog << std::endl;\n\t\tstd::exit(1);\n\t}\n\tthis->shaderVar_locationsCount = SHADERVAR_COUNT+shaderVar_custom.size();\n\tthis->shaderVar_locations = new GLint[this->shaderVar_locationsCount];\n\tfor(int i=0;i<this->shaderVar_locationsCount;i++)\n\t{\n\t\tthis->shaderVar_locations[i] = -1;\n\t}\n\tthis->currentMaterial.assetId = -1;\n\tthis->currentMaterial.materialId = -1;\n}\nShaderProgram::~ShaderProgram()\n{\n\tdelete [] this->shader_program;\n}\nvoid ShaderProgram::useShader()\n{\n\tglUseProgram(this->programID);\n}\ninline GLint getNamedShaderLocation(bool uniform, GLuint programID, const char *name)\n{\n\tGLint a;\n\tif(uniform)\n\t\ta = glGetUniformLocation(programID, name);\n\telse\n\t\ta = glGetAttribLocation(programID, name);\n\treturn a;\n}\nGLint ShaderProgram::getShaderLocation(bool uniform, int shaderVar)\n{\n\tif(shaderVar<0 || shaderVar>=this->shaderVar_locationsCount)\n\t\tutil::Globals::fatalError(\"Attempt to access invalid shaderVar for program \"+std::to_string(this->programID));\n\tGLint &sl = this->shaderVar_locations[shaderVar];\n\tif(sl==-1)\n\t{\n\t\tif(shaderVar<SHADERVAR_COUNT)//If common shadervar\n\t\t{\n\t\t\tif((1<<shaderVar)&this->shaderVar_overrideMask)//If shaderVar is overriden\n\t\t\t{\n\t\t\t\tint shaderVarCustomID = 0;\n\t\t\t\tfor(int i=0;i<shaderVar;i++)\n\t\t\t\t{\n\t\t\t\t\tif((1<<i)&shaderVar_overrideMask)\n\t\t\t\t\t{\n\t\t\t\t\t\tshaderVarCustomID++;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tsl = getNamedShaderLocation(uniform, this->programID, this->shaderVar_overrides[shaderVarCustomID].c_str());\n\t\t\t}\n\t\t\tif((1<<shaderVar)&this->shaderVar_mask)\n\t\t\t{\n\t\t\t\tsl = getNamedShaderLocation(uniform, this->programID, shaderNames[shaderVar].c_str());\n\t\t\t}\n\t\t}\n\t\telse//If custom shaderVar\n\t\t{\n\t\t\tsl = getNamedShaderLocation(uniform, this->programID, this->shaderVar_custom[shaderVar-SHADERVAR_COUNT].c_str());\n\t\t}\n\t}\n\treturn sl;\n}\nbool ShaderProgram::setVertexAttributePointer(int shaderVar, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer)\n{\n\tint location = getShaderLocation(false, shaderVar);\n\tif(location!=-1)\n\t{\n\t\tglEnableVertexAttribArray(location);\n\t\tglVertexAttribPointer(location, size, type, normalized, stride, pointer);\n\t\treturn true;\n\t}\n\treturn false;\n}\n\nbool ShaderProgram::setMaterial(MaterialAsset mat)\n{\n\tif(this->currentMaterial!=mat)\n\t{\n\t\tthis->currentMaterial = mat;\n\t\tutil::Asset *asset = util::AssetManager::getAssetManager()->getAsset(mat.assetId);\n\t\trender::MaterialLibrary *mtlAsset = 
dynamic_cast<render::MaterialLibrary *>(asset);\n\t\tif(asset==0 || mtlAsset==0)\n\t\t\tutil::Globals::fatalError(\"Failed to find material asset\");\n\t\tmtlAsset->updateShader(this, mat.materialId);\n\t\treturn true;\n\t}\n\treturn false;\n}\nShaderProgram *ShaderProgram::getShader(int shaderIndex)\n{\n\treturn shader_program[shaderIndex];\n}\nGLint ShaderProgram::compileShader(std::string name, int shaderTypeId, std::string shaderCode)\n{\n\tint shaderType = shaderTypeId==0 ? GL_VERTEX_SHADER : GL_FRAGMENT_SHADER;\n\tconst GLchar *code = shaderCode.c_str();\n\tconst GLint length = shaderCode.length();\n\tGLuint shaderId = glCreateShader(shaderType);\n\tglShaderSource(shaderId, 1, &code, &length);\n\tglCompileShader(shaderId);\n\n\tGLint compileStatus;\n\tglGetShaderiv(shaderId, GL_COMPILE_STATUS, &compileStatus);\n\tif(compileStatus==GL_FALSE)\n\t{\n\t\tGLint infoLogLength;\n\t\tglGetShaderiv(shaderId, GL_INFO_LOG_LENGTH, &infoLogLength);\n\t\tchar *infoLog = new char [infoLogLength+1];\n\t\tglGetShaderInfoLog(shaderId, infoLogLength, NULL, infoLog);\n\t\tinfoLog[infoLogLength] = 0;\n\t\tstd::cerr << \"Failed to compile \" << (shaderType==GL_VERTEX_SHADER ? \"vertex\" : \"fragment\") << \" shader \" << name << \":\" << std::endl << \"Error Log [#\" << (infoLogLength-1) << \"]: \" << infoLog << std::endl << \"Vertcode:\" << std::endl << shaderCode << std::endl;\n\t\treturn -1;\n\t}\n\treturn shaderId;\n}\nvoid ShaderProgram::loadShaders()\n{\n\t#include \"render/shaders/ShaderLoader.c\"\n}\n" }, { "alpha_fraction": 0.6845124363899231, "alphanum_fraction": 0.6883364915847778, "avg_line_length": 19.115385055541992, "blob_id": "53d96364118de88956e2c796737aae27d32fe4b6", "content_id": "267b4de769d8f450bdccbf869d1c5882e6a15958", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 523, "license_type": "no_license", "max_line_length": 57, "num_lines": 26, "path": "/src/cpp/main.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"main.hpp\"\n#include <fstream>\n#include <ctime>\n#include <csignal>\n\n#include \"gui/WindowScreenManager.hpp\"\n\nstd::ostream *debugfile = 0;\n\nvoid sigtermHandler(int sig){\n\tstd::cout << \"Interrupt\" << std::endl;\n}\n\nint main(int argc, char *argv[])\n{\n\tstd::signal(SIGHUP, sigtermHandler);\n\n\tstd::ofstream df;\n\tdf.open(\"tmp/debug.log\", std::ios::out|std::ios::trunc);\n\tdebugfile = dynamic_cast<std::ostream *>(&df);\n\n\tstd::srand(static_cast<unsigned int>(std::time(0l)));\n\n\tWindowScreenManager manager;\n\tmanager.run();\n}\n" }, { "alpha_fraction": 0.7560038566589355, "alphanum_fraction": 0.7560038566589355, "avg_line_length": 32.58064651489258, "blob_id": "b8cfe7cea7c1b8f55bd7c22788f29ec013992a3c", "content_id": "8030d7a8b1220ad6b4b5fc2d2edae9d0541ffe14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1041, "license_type": "no_license", "max_line_length": 189, "num_lines": 31, "path": "/src/cpp/ai/path/PathExplorer.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_PATH_PATHEXPLORER_HPP_\n#define FYP_AI_PATH_PATHEXPLORER_HPP_\n\n#include <set>\n#include \"PathCommon.h\"\n#include \"ai/path/NavigationGraph.hpp\"\n\nnamespace ai {\n\tnamespace path {\n\t\tclass PathExplorer {\n\t\t\tprivate:\n\t\t\t\tint groupWhitelist,poiGroupMask,portalGroupMask;\n\t\t\t\tPathNode *lastPortalNode,*lastLastPortalNode;\n\t\t\t\tstd::set<int> 
previousVisitedPortalNodes;\n\t\t\t\tstd::set<int> previousVisitedPOINodes;\n\t\t\t\tNavigationGraph *navGraph;\n\t\t\tpublic:\n\t\t\t\tPathExplorer(NavigationGraph *navigationGraph);\n\t\t\t\t~PathExplorer();\n\t\t\t\tvoid setGroupWhitelist(int groupMask);\n\t\t\t\tvoid setGroupPOI(int groupMask);\n\t\t\t\tvoid setGroupPortal(int groupMask);\n\t\t\t\tPathNode *getNext_legacy(PathNode *current);\n\t\t\t\tfloat getNodeLinkWeight_legacy(PathNode *node, PathNodeLink *pathNodeLink, int depth);\n\t\t\t\tPathNode *getNext(PathNode *current);\n\t\t\t\tfloat getNodeLinkWeight(PathNodeLink *pathNodeLink, PathNode *node, PathNode **selectedPortalNode, int depth, int &portalNodeDepth, std::set<int> previouslySearchedLinks, float weight);\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.6761844158172607, "alphanum_fraction": 0.688307523727417, "avg_line_length": 28.158140182495117, "blob_id": "e7c5150fe8e43c15ab716ebff85cac8f21c40515", "content_id": "4165f69fca0e2665e15630b5b8d0d7813694b6e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6269, "license_type": "no_license", "max_line_length": 206, "num_lines": 215, "path": "/src/cpp/ai/path/PathExplorer.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"PathExplorer.hpp\"\n\nusing namespace ai::path;\n\nPathExplorer::PathExplorer(NavigationGraph *navigationGraph)\n{\n\tthis->groupWhitelist = 0;\n\tthis->poiGroupMask = 0;\n\tthis->portalGroupMask = 0;\n\tthis->lastPortalNode = 0;\n\tthis->lastLastPortalNode = 0;\n\tthis->navGraph = navigationGraph;\n}\nPathExplorer::~PathExplorer()\n{\n\t\n}\nvoid PathExplorer::setGroupWhitelist(int groupMask)\n{\n\tthis->groupWhitelist = groupMask;\n}\nvoid PathExplorer::setGroupPOI(int groupMask)\n{\n\tthis->poiGroupMask = groupMask;\n}\nvoid PathExplorer::setGroupPortal(int groupMask)\n{\n\tthis->portalGroupMask = groupMask;\n}\n//float PathExplorer::getNodeLinkWeight(PathNode *node, PathNodeLink *pathNodeLink, int depth)\n//{\n//\tif(depth>4)\n//\t\treturn 0;\n//\tfloat weight = 0;\n//\tif(this->visited.find(pathNodeLink->id)==this->visited.end())//If not visited\n//\t\tweight -= 1000/(1+depth);\n//\tPathNode *other = pathNodeLink->getOther(node);\n//\tfloat minWeight = std::numeric_limits<float>::max(),t;\n//\tPathNode *p;\n//\tfor(auto *l : other->links)\n//\t{\n//\t\tp = l->getOther(other);\n//\t\tbool b = depth<2 && this->navGraph->getPathNodeGroupMask(p->id)&this->groupPOI && this->visitedPOI.find(p->id)==this->visitedPOI.end();\n//\t\tif(this->groupWhitelist==0 || this->navGraph->getPathNodeGroupMask(p->id)&this->groupWhitelist || b)\n//\t\t{\n//\t\t\tt = this->getNodeLinkWeight(node, l, depth+1);\n//\t\t\tif(b)\n//\t\t\t\tt -= 10000;\n//\t\t\tif(t<minWeight)\n//\t\t\t{\n//\t\t\t\tminWeight = t;\n//\t\t\t}\n//\t\t}\n//\t}\n//\tif(minWeight>=std::numeric_limits<float>::max()-1)\n//\t\tminWeight = 0;\n//\treturn weight+minWeight;\n//}\n////Prefer: Unexplored, Shortest, Unexploded Children\n//PathNode *PathExplorer::getNext_legacy(PathNode *current)\n//{\n//\tPathNodeLink *selectedLink = 0,*backupLink = 0;\n//\tPathNode *selectedLinkNode = 0,*backupLinkNode=0,*p;\n//\tint count = 0;\n//\tfloat min = std::numeric_limits<float>::max(),t;\n//\tfor(auto *l : current->links)\n//\t{\n//\t\tp = l->getOther(current);\n//\t\tbool b = this->navGraph->getPathNodeGroupMask(p->id)&this->groupPOI && this->visitedPOI.find(p->id)==this->visitedPOI.end();\n//\t\tif(this->groupWhitelist==0 || 
this->navGraph->getPathNodeGroupMask(p->id)&this->groupWhitelist || b)\n//\t\t{\n//\t\t\tif(l==this->previousVisitedLink)\n//\t\t\t{\n//\t\t\t\tbackupLinkNode = p;\n//\t\t\t\tbackupLink = l;\n//\t\t\t\tcontinue;\n//\t\t\t}\n//\t\t\tcount++;\n//\t\t\tt = getNodeLinkWeight(current, l, 0);\n//\t\t\tif(b)\n//\t\t\t\tt -= 10000;\n//\t\t\tif(t<min)//If not visited\n//\t\t\t{\n//\t\t\t\tselectedLink = l;\n//\t\t\t\tselectedLinkNode = l->getOther(current);\n//\t\t\t\tmin = t;\n//\t\t\t}\n//\t\t}\n//\t\t\n//\t}\n//\tif(count==0)\n//\t{\n//\t\tselectedLink = backupLink;\n//\t\tselectedLinkNode = backupLinkNode;\n//\t}\n//\tif(this->navGraph->getPathNodeGroupMask(selectedLinkNode->id)&this->groupPOI)\n//\t{\n//\t\tthis->visitedPOI.insert(selectedLinkNode->id);\n//\t}\n//\tthis->previousVisitedLink = selectedLink;\n//\tthis->visited.insert(selectedLink->id);\n//\treturn selectedLinkNode;\n//}\ninline std::string head(int len)\n{\n\tstd::string s;\n\tfor(int i=0;i<len;i++)\n\t\ts += \"\t\";\n\ts = s+std::to_string(len)+\" \";\n\treturn s;\n}\nPathNode *PathExplorer::getNext(PathNode *current)\n{\n\tPathNode *selectedPortalNode=0,*other,*otherPortalNode=0;\n\tint portalNodeDepth;\n\tfloat min = std::numeric_limits<float>::max(),t;\n\tfor(auto *l : current->links)\n\t{\n\t\tother = l->getOther(current);\n\t\tif(other==this->lastPortalNode || other==this->lastLastPortalNode)\n\t\t\tcontinue;\n\t\totherPortalNode = 0;\n\t\tportalNodeDepth = 0;\n\t\tt = this->getNodeLinkWeight(l, other, &otherPortalNode, 1, portalNodeDepth, {}, 0)+std::rand()%5;\n\t\tif(min>t && otherPortalNode!=0)\n\t\t{\n\t\t\tmin = t;\n\t\t\tselectedPortalNode = otherPortalNode;\n\t\t}\n\t}\n\tif(selectedPortalNode==0)\n\t{\n\t\tselectedPortalNode = this->lastPortalNode;\n\t\tif(selectedPortalNode==0)\n\t\t\treturn 0;\n\t}\n\telse\n\t{\n\t\tthis->previousVisitedPortalNodes.insert(selectedPortalNode->id);\n\t}\n\tthis->lastLastPortalNode = this->lastPortalNode;\n\tthis->lastPortalNode = selectedPortalNode;\n\tif(this->navGraph->getPathNodeGroupMask(selectedPortalNode->id)&this->poiGroupMask)\n\t{\n\t\tthis->previousVisitedPOINodes.insert(selectedPortalNode->id);\n\t}\n\treturn selectedPortalNode;\n}\nfloat PathExplorer::getNodeLinkWeight(PathNodeLink *pathNodeLink, PathNode *node, PathNode **selectedPortalNode, int depth, int &selectedPortalNodeDepth, std::set<int> previouslySearchedLinks, float weight)\n{\n\tif(depth>5)\n\t\treturn 0;\n\tweight += static_cast<float>(pathNodeLink->dist)/depth;\n\tpreviouslySearchedLinks.insert(pathNodeLink->id);\n\tif(depth<3 && this->navGraph->getPathNodeGroupMask(node->id)&this->poiGroupMask)\n\t{\n\t\tif(*selectedPortalNode==0)\n\t\t{\n\t\t\t*selectedPortalNode = node;\n\t\t\tselectedPortalNodeDepth = depth;\n\t\t}\n\t\tif(this->previousVisitedPOINodes.find(node->id)==this->previousVisitedPOINodes.end())//If we've already visited this POI node\n\t\t\tweight -= 10000;\n\t\telse\n\t\t\tweight += 100;\n\t}\n\tif(this->navGraph->getPathNodeGroupMask(node->id)&this->portalGroupMask)\n\t{\n\t\tif(*selectedPortalNode==0)\n\t\t{\n\t\t\t*selectedPortalNode = node;\n\t\t\tselectedPortalNodeDepth = depth;\n\t\t}\n\t\tif(this->previousVisitedPortalNodes.find(node->id)!=this->previousVisitedPortalNodes.end())//If we've already visited this portal node\n\t\t\tweight += 1000/depth;\n\t}\n\tif(node->links.size()==2)//Waypoint node\n\t{\n\t\tfor(auto *l : node->links)\n\t\t{\n\t\t\tif(l!=pathNodeLink)//If this was NOT how we got here (its the other)\n\t\t\t{\n\t\t\t\tweight = this->getNodeLinkWeight(l, 
l->getOther(node), selectedPortalNode, depth+1, selectedPortalNodeDepth, previouslySearchedLinks, weight);\n\t\t\t}\n\t\t}\n\t}\n\telse// if(node->links.size()>2)//Junction node\n\t{\n\t\tint otherPortalNodeDepth;\n\t\tfloat min = std::numeric_limits<float>::max(),w;\n\t\tPathNode *otherPortalNode=0,*other,*selectedOtherPortalNode=0;\n\t\tfor(auto *l : node->links)\n\t\t{\n\t\t\tif(l==pathNodeLink)//If this was how we got here\n\t\t\t\tcontinue;\n\t\t\tif(previouslySearchedLinks.find(l->id)!=previouslySearchedLinks.end())//If link was already searched\n\t\t\t\tcontinue;\n\t\t\totherPortalNode = 0;\n\t\t\tother = l->getOther(node);\n\t\t\tw = this->getNodeLinkWeight(l, other, &otherPortalNode, depth+1, otherPortalNodeDepth, previouslySearchedLinks, 0);\n\t\t\tif(min>w)\n\t\t\t{\n\t\t\t\tmin = w;\n\t\t\t\tselectedOtherPortalNode = otherPortalNode;\n\t\t\t}\n\t\t}\n\t\tif(*selectedPortalNode==0 && selectedOtherPortalNode!=0)\n\t\t{\n\t\t\t*selectedPortalNode = selectedOtherPortalNode;\n\t\t}\n\t\tif(selectedOtherPortalNode!=0)\n\t\t\tweight += min;\n\t}\n\treturn weight;\n}\n" }, { "alpha_fraction": 0.7034482955932617, "alphanum_fraction": 0.7034482955932617, "avg_line_length": 13.5, "blob_id": "7c44235c07b166d9047ae24e3e6a9f83aee586d7", "content_id": "0f474a2f32b45b282250492ee872f0dd264a684d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 145, "license_type": "no_license", "max_line_length": 35, "num_lines": 10, "path": "/src/cpp/ai/path/PathHelper.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_PATH_PATHHELPER_HPP_\n#define FYP_AI_PATH_PATHHELPER_HPP_\n\nclass PathHelper {\n\tpublic:\n\t\tPathHelper();\n\t\t~PathHelper();\n};\n\n#endif\n" }, { "alpha_fraction": 0.7570093274116516, "alphanum_fraction": 0.7570093274116516, "avg_line_length": 26.319149017333984, "blob_id": "383fe2f4b60a7f40fa14950e9665d6ddcdc85855", "content_id": "ac164d1d41a6f697d1447787eebfdff102b8bde2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1284, "license_type": "no_license", "max_line_length": 62, "num_lines": 47, "path": "/src/cpp/render/SkeletalAnimation.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_RENDER_SKELETALANIMATION_HPP_\n#define FYP_RENDER_SKELETALANIMATION_HPP_\n\nnamespace render {\n\tclass RenderManager;\n\ttypedef struct SkeletalAnimationJoint SkeletalAnimationJoint;\n\ttypedef struct SkeletalAnimationBound SkeletalAnimationBound;\n}\n\n#include \"render/SkeletalModel.hpp\"\n#include \"util/AssetManager.hpp\"\n#include \"util/Boundaries.hpp\"\n\nnamespace render {\n\tstruct SkeletalAnimationJoint {\n\t\tstd::string name;\n\t\tint parent;\n\t\tint flags;\n\t\tint startIndex;\n\t};\n\tclass SkeletalAnimation : public util::Asset {\n\t\tpublic:\n\t\t\tSkeletalAnimation(int assetId, std::istream &fp);\n\t\t\t~SkeletalAnimation();\n\t\t\tvirtual void write(std::ostream &ost) const;\n\t\t\tvirtual void postload();\n\t\t\tvoid renderBounds(RenderManager &rManager, double time);\n\t\t\tdouble getAnimationDuration();\n\t\t\tint getFrame(double time);\n\t\t\tSkeleton getFrameSkeleton(int frame);\n\t\t\tutil::Boundaries::AABB &getFrameBounds(int frame);\n\t\t\tSkeleton getInterpolatedSkeleton(int firstFrame);\n\t\t\tSkeleton getSkeleton(double time);\n\t\tprivate:\n\t\t\tint numFrames;\n\t\t\tint numJoints;\n\t\t\tint frameRate;\n\t\t\tint numAnimatedComponents;\n\t\t\tstd::vector<SkeletalAnimationJoint> 
hierarchy;\n\t\t\tstd::vector<util::Boundaries::AABB *> bounds;\n\t\t\tSkeleton baseFrame;\n\t\t\tstd::vector<Skeleton> frames;\n\t\t\tfloat *frameData;\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.6842105388641357, "alphanum_fraction": 0.6842105388641357, "avg_line_length": 8.5, "blob_id": "7c71a727c6eda5572326bb36f6199c7be82d58ff", "content_id": "917c8ff8d01c92a8b2e6651f531b9b4d37ea6e43", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 95, "license_type": "no_license", "max_line_length": 27, "num_lines": 10, "path": "/src/cpp/ai/GoalManager.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"GoalManager.hpp\"\n\nGoalManager::GoalManager()\n{\n\t\n}\nGoalManager::~GoalManager()\n{\n\t\n}\n" }, { "alpha_fraction": 0.6583113670349121, "alphanum_fraction": 0.668865442276001, "avg_line_length": 25.59649085998535, "blob_id": "7c46c89d59eca0e3a01f675da8873aa2b939c8ea", "content_id": "6f645f844f7ef1e7b070cfe350fb4a75604d47ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1516, "license_type": "no_license", "max_line_length": 159, "num_lines": 57, "path": "/src/cpp/util/Boundaries.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_UTIL_BOUNDARIES_HPP_\n#define FYP_UTIL_BOUNDARIES_HPP_\n\n#include \"render/RenderManager.hpp\"\n\nnamespace util {\n\tnamespace Boundaries {\n\t\tstruct Sphere {\n\t\t\tglm::vec3 center;\n\t\t\tfloat radius;\n\t\t};\n\t\tstruct Raycast {\n\t\t\tglm::vec3 origin;\n\t\t\tglm::vec3 direction;\n\t\t\tfloat maxDistance;\n\t\t};\n\t\tstruct RaycastResult {\n\t\t\tfloat distance;\n\t\t\tglm::vec3 hitNormal;\n\t\t\tbool hit;\n\t\t};\n\t\tclass OBB {\n\t\t\tpublic:\n\t\t\t\tglm::vec3 min,max;\n\t\t\t\tOBB(const glm::vec3 &min, const glm::vec3 &max);\n\t\t\t\tOBB(const float minX, const float minY, const float minZ, const float maxX, const float maxY, const float maxZ);\n\t\t};\n\t\tclass AABB {\n\t\t\tpublic:\n\t\t\t\tfloat boxCenter[3];\n\t\t\t\tfloat boxHalfSize[3];\n\t\t\t\tAABB();\n\t\t\t\tAABB(const float boxCenterX, const float boxCenterY, const float boxCenterZ, const float boxHalfSizeX, const float boxHalfSizeY, const float boxHalfSizeZ);\n\t\t\t\tAABB(const glm::vec3 &min, const glm::vec3 &max);\n\t\t\t\t~AABB();\n\t\t\t\tAABB translate(const glm::vec3 &offset);\n\t\t\t\tOBB rotate(const glm::quat &rotation);\n\t\t\t\tbool checkInside(const glm::vec3 v);\n\t\t\t\tbool checkIntersect(const AABB &aabb);\n\t\t\t\tbool checkIntersect(const Sphere &rbb);\n\t\t\t\tvoid render(render::RenderManager &rManager, glm::vec4 color, bool solid);\n\t\t\t\tfloat rayCastDistance(Raycast &raycast);\n\t\t\t\tbool rayCastCheck(Raycast &raycast);\n\t\t\t\tRaycastResult *rayCast(Raycast &raycast);\n\t\t\t\tglm::vec3 min();\n\t\t\t\tglm::vec3 max();\n\t\t\t\tfloat minX();\n\t\t\t\tfloat minY();\n\t\t\t\tfloat minZ();\n\t\t\t\tfloat maxX();\n\t\t\t\tfloat maxY();\n\t\t\t\tfloat maxZ();\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.6756756901741028, "alphanum_fraction": 0.7027027010917664, "avg_line_length": 17.5, "blob_id": "5062aa140b12f5f285a305e666be50f49ddf7679", "content_id": "b859a4e02549fa68d892979143ef24938ebffdbf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 148, "license_type": "no_license", "max_line_length": 56, "num_lines": 8, "path": "/src/cpp/render/shaders/code/solidColor_vertex.c", "repo_name": "CHAZICLE/MuseumGuard", 
"src_encoding": "UTF-8", "text": "//vertex\n\nimport in vec3 vertex_position;\nimport uniform mat4 matrix_MVP;\n\nvoid main() {\n gl_Position = matrix_MVP * vec4(vertex_position, 1);\n}\n" }, { "alpha_fraction": 0.6588869094848633, "alphanum_fraction": 0.6834231019020081, "avg_line_length": 40.775001525878906, "blob_id": "1d44ddd8a6bac03c435763eb8713547ef7a5ef87", "content_id": "de58de44e1651170091407f684a3a69d259a647d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1671, "license_type": "no_license", "max_line_length": 62, "num_lines": 40, "path": "/src/cpp/render/MaterialData.h", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef __MATERIALDATA_H_INCLUDED__\n#define __MATERIALDATA_H_INCLUDED__\n\n#define MATERIAL_INDEX_Ka 0\n#define MATERIAL_INDEX_Kd 1\n#define MATERIAL_INDEX_Ks 2\n#define MATERIAL_INDEX_Tf 3\n#define MATERIAL_INDEX_d 4\n#define MATERIAL_INDEX_Ns 5\n#define MATERIAL_INDEX_Ni 6\n#define MATERIAL_INDEX_illum 7\n#define MATERIAL_INDEX_sharpness 8\n#define MATERIAL_INDEX_map_Ka 9\n#define MATERIAL_INDEX_map_Kd 10\n#define MATERIAL_INDEX_map_Ks 11\n#define MATERIAL_INDEX_map_Ns 12\n#define MATERIAL_INDEX_map_d 13\n#define MATERIAL_INDEX_disp 14\n#define MATERIAL_INDEX_decal 15\n#define MATERIAL_INDEX_bump 16\n\n#define MATERIAL_MASK_Ka (1<<MATERIAL_INDEX_Ka )\n#define MATERIAL_MASK_Kd (1<<MATERIAL_INDEX_Kd )\n#define MATERIAL_MASK_Ks (1<<MATERIAL_INDEX_Ks )\n#define MATERIAL_MASK_Tf (1<<MATERIAL_INDEX_Tf )\n#define MATERIAL_MASK_d (1<<MATERIAL_INDEX_d )\n#define MATERIAL_MASK_Ns (1<<MATERIAL_INDEX_Ns )\n#define MATERIAL_MASK_Ni (1<<MATERIAL_INDEX_Ni )\n#define MATERIAL_MASK_illum (1<<MATERIAL_INDEX_illum )\n#define MATERIAL_MASK_sharpness (1<<MATERIAL_INDEX_sharpness)\n#define MATERIAL_MASK_map_Ka (1<<MATERIAL_INDEX_map_Ka )\n#define MATERIAL_MASK_map_Kd (1<<MATERIAL_INDEX_map_Kd )\n#define MATERIAL_MASK_map_Ks (1<<MATERIAL_INDEX_map_Ks )\n#define MATERIAL_MASK_map_Ns (1<<MATERIAL_INDEX_map_Ns )\n#define MATERIAL_MASK_map_d (1<<MATERIAL_INDEX_map_d )\n#define MATERIAL_MASK_disp (1<<MATERIAL_INDEX_disp )\n#define MATERIAL_MASK_decal (1<<MATERIAL_INDEX_decal )\n#define MATERIAL_MASK_bump (1<<MATERIAL_INDEX_bump )\n\n#endif\n" }, { "alpha_fraction": 0.6875, "alphanum_fraction": 0.7083333134651184, "avg_line_length": 13.399999618530273, "blob_id": "63fbee58a383a1280390d59422a179f4ef2d7b19", "content_id": "0811b764ea634ac9d126361f26c48a3937bac46c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 144, "license_type": "no_license", "max_line_length": 43, "num_lines": 10, "path": "/src/cpp/render/shaders/code/billboard_fragment.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//fragment\n\nin vec2 UV;\nimport uniform sampler2D material_map_Kd;\n\nout vec4 color;\n\nvoid main() {\n\tcolor = texture(material_map_Kd, UV).rgba;\n}\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 18.636363983154297, "blob_id": "81d0d303842e335966cb942c75c5ff9a3bfdf647", "content_id": "70cfd52cf3ca05a2bd7983b91569371d70225746", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 432, "license_type": "no_license", "max_line_length": 77, "num_lines": 22, "path": "/src/cpp/gui/EventHandler.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_EVENTHANDLER_HPP_\n#define FYP_GUI_EVENTHANDLER_HPP_\n\nclass 
EventHandler {\n\tpublic:\n\t\tEventHandler();\n\t\t~EventHandler();\n\t\t/**\n\t\t * On control boolean update\n\t\t */\n\t\tbool onControlEvent(int control, bool state);\n\t\t/**\n\t\t * On control delta update\n\t\t */\n\t\tbool onControlEvent(int control, double x, double y, double dx, double dy);\n\t\t/**\n\t\t * Called when the screen manager resizes\n\t\t */\n\t\tvoid onResize();\n};\n\n#endif\n" }, { "alpha_fraction": 0.6977491974830627, "alphanum_fraction": 0.7234726548194885, "avg_line_length": 19.733333587646484, "blob_id": "056738cffeb462e5f2fa2873e73430b8ca3f2bd5", "content_id": "da7e83c8f7a1ca242d708f2bd96aa3cd0eac704a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 311, "license_type": "no_license", "max_line_length": 76, "num_lines": 15, "path": "/src/cpp/render/shaders/code/fuzzymodel_vertex.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//vertex\n\nimport in vec3 vertex_position;\nimport in vec3 vertex_color;\n\nimport uniform mat4 matrix_M;\nimport uniform mat4 matrix_V;\nimport uniform mat4 matrix_P;\n\nout vec3 vertexColor;\n\nvoid main() {\n gl_Position = matrix_P * matrix_V * matrix_M * vec4(vertex_position, 1);\n vertexColor = vertex_color;\n}\n" }, { "alpha_fraction": 0.7137490510940552, "alphanum_fraction": 0.7231404781341553, "avg_line_length": 21.94827651977539, "blob_id": "fffbb6ad68ce8c9b153fdecfd552725b11addd43", "content_id": "86a602b250e1daf15be7e94c2b7ccfa3b0ba7a14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2662, "license_type": "no_license", "max_line_length": 103, "num_lines": 116, "path": "/src/cpp/world/Entity.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"render/shaders/ShaderUtils.hpp\"\n#include \"render/BasicShapes.hpp\"\n\n//debug\n#include <glm/glm.hpp>\n#include <glm/gtc/matrix_transform.hpp>\n#include <glm/gtx/quaternion.hpp>\n\n#include \"Entity.hpp\"\n\nusing namespace world;\nusing namespace util::Boundaries;\n\nEntity::Entity()\n{\n\tthis->bounds = 0;\n\tthis->doRender = true;\n\tthis->healthCycleIndex = 0;\n\tthis->healthCycleBullets = 0;\n\tthis->healthCycleDuration = 0;\n\tthis->lastDamageTime = 0;\n}\nEntity::~Entity()\n{\n\t\n}\n\nvoid Entity::addedToWorld(world::World *world, double spawnTime)\n{\n\tthis->world = world;\n\tthis->spawnTime = spawnTime;\n}\nWorld &Entity::getWorld()\n{\n\treturn *this->world;\n}\ndouble Entity::getSpawnTime()\n{\n\treturn this->spawnTime;\n}\nglm::vec3 Entity::getPosition()\n{\n\treturn this->position;\n}\nvoid Entity::setPosition(glm::vec3 location)\n{\n\tthis->position = location;\n}\nvoid Entity::translate(glm::vec3 offset)\n{\n\tthis->position += offset;\n}\nglm::quat Entity::getOrientation() {\n\treturn this->orientation;\n}\nvoid Entity::setOrientation(glm::quat rotation)\n{\n\tthis->orientation = rotation;\n}\nAABB *Entity::getBounds()\n{\n\treturn this->bounds;\n}\nvoid Entity::tick(util::DeltaTime &deltaTime)\n{\n}\nvoid Entity::render(render::RenderManager &rManager)\n{\n\t\n}\nvoid Entity::renderDebug(render::RenderManager &rManager, bool renderPositionMarker, bool renderBounds)\n{\n\tif(renderPositionMarker)\n\t\trManager.renderOrientation(this->getPosition(), this->getOrientation());\n\tif(renderBounds && this->getBounds()!=0)\n\t{\n\t\tglm::mat4 a = rManager.M;\n\t\trManager.M = glm::translate(glm::mat4(), 
this->getPosition())*glm::toMat4(this->getOrientation());\n\t\trManager.markMDirty();\n\t\tthis->getBounds()->render(rManager, glm::vec4(1.f, 1.f, 0.f, 1.f), false);\n\t\trManager.M = a;\n\t}\n}\nvoid Entity::attack(double time, glm::vec3 direction, int type)\n{\n\tif(this->healthCycleBullets==0)//If no bullets can kill, skip damage\n\t\treturn;\n\tthis->lastDamageTime[this->healthCycleIndex] = time;\n\tint c = 0;\n\tfor(int i=0;i<this->healthCycleBullets;i++)\n\t{\n\t\tif((time-this->lastDamageTime[i])<this->healthCycleDuration)\n\t\t\tc++;\n\t}\n\tif(c==this->healthCycleBullets)\n\t{\n\t\tthis->die(time, direction, type);\n\t\treturn;\n\t}\n\tthis->healthCycleIndex++;\n\tif(this->healthCycleIndex>=this->healthCycleBullets)\n\t\tthis->healthCycleIndex = 0;\n}\nvoid Entity::die(double time, glm::vec3 direction, int type)\n{\n\t\n}\nvoid Entity::setBulletHealth(double healthCycleDuration, int healthCycleBullets)\n{\n\tthis->healthCycleIndex = 0;\n\tthis->healthCycleBullets = healthCycleBullets;\n\tthis->healthCycleDuration = healthCycleDuration;\n\tif(this->lastDamageTime!=0)\n\t\tdelete this->lastDamageTime;\n\tthis->lastDamageTime = new double[this->healthCycleBullets];\n}\n" }, { "alpha_fraction": 0.7057745456695557, "alphanum_fraction": 0.7076076865196228, "avg_line_length": 26.9743595123291, "blob_id": "2c578932e44c082b3f3d58a40f1cc0a5e901eb38", "content_id": "157c6573fff8eceb941332c5afe985899baed8ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1091, "license_type": "no_license", "max_line_length": 76, "num_lines": 39, "path": "/src/cpp/util/StreamUtils.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_UTIL_STREAMUTILS_HPP_\n#define FYP_UTIL_STREAMUTILS_HPP_\n\n#include <string>\n#include <istream>\n\n//#define ENABLE_STREAMUTILS_DEBUG\n\n#define readVec3f(x) glm::vec3(readFloat(x), readFloat(x), readFloat(x))\n\nnamespace util {\n\tnamespace StreamUtils {\n\n#ifdef ENABLE_STREAMUTILS_DEBUG\n\t\tstd::string readString(const char *file, int line, std::istream &fp);\n\t\tint readInt(const char *file, int line, std::istream &fp);\n\t\tfloat readFloat(const char *file, int line, std::istream &fp);\n\t\tbool readBool(const char *file, int line, std::istream &fp);\n#else\n\t\tstd::string readString(std::istream &fp);\n\t\tint readInt(std::istream &fp);\n\t\tfloat readFloat(std::istream &fp);\n\t\tbool readBool(std::istream &fp);\n#endif\n\t}\n}\n\n#ifndef __STRSKIP\n#ifdef ENABLE_STREAMUTILS_DEBUG\n\n#define readString(fp) util::StreamUtils::readString(__FILE__, __LINE__, fp)\n#define readInt(fp) util::StreamUtils::readInt(__FILE__, __LINE__, fp)\n#define readFloat(fp) util::StreamUtils::readFloat(__FILE__, __LINE__, fp)\n#define readBool(fp) util::StreamUtils::readBool(__FILE__, __LINE__, fp)\n\n#endif\n#endif\n\n#endif\n" }, { "alpha_fraction": 0.8160919547080994, "alphanum_fraction": 0.8160919547080994, "avg_line_length": 42.5, "blob_id": "b82490b78d62d7cebe178b8cced94dad5f2354a3", "content_id": "0e63507bc36f647236fa275c2aca9e1f58743a1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 87, "license_type": "no_license", "max_line_length": 43, "num_lines": 2, "path": "/src/cpp/render/shaders/ShaderUtils.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"render/shaders/ShaderPrograms.h\"\n#include \"render/shaders/ShaderProgram.hpp\"\n" }, { "alpha_fraction": 0.757709264755249, "alphanum_fraction": 
0.757709264755249, "avg_line_length": 27.375, "blob_id": "7099852e8fb86ece1fc785f05eccbe2ef3a96761", "content_id": "b31cbbfc18ef00287e5b62c879d0a04f5398abd7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 908, "license_type": "no_license", "max_line_length": 66, "num_lines": 32, "path": "/src/cpp/world/entities/SecurityCamera.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_WORLD_ENTITIES_SECURITYCAMERA_HPP_\n#define FYP_WORLD_ENTITIES_SECURITYCAMERA_HPP_\n\n#include \"world/Entity.hpp\"\n#include \"render/SkeletalModel.hpp\"\n#include \"render/SkeletalAnimation.hpp\"\n#include \"ai/PerceptionManager.hpp\"\n#include \"render/DDSImage.hpp\"\n\nnamespace world {\n\tnamespace entities {\n\t\tclass SecurityCamera : public Entity {\n\t\t\ttypedef Entity super;\n\t\t\tprivate:\n\t\t\t\trender::SkeletalModel *model;\n\t\t\t\trender::SkeletalAnimation *initAnimation;\n\t\t\t\tdouble animationDuration,animationCurrent,warningStop;\n\t\t\t\tbool initAnimating,warning;\n\t\t\t\tai::PerceptionManager *perception;\n\t\t\t\tGLuint billboardVertexArrayId,billboardVertexPositionBufferID;\n\t\t\t\trender::DDSImage *warningImage;\n\t\t\tpublic:\n\t\t\t\tSecurityCamera();\n\t\t\t\tvirtual ~SecurityCamera();\n\t\t\t\tvirtual void tick(util::DeltaTime &deltaTime);\n\t\t\t\tvirtual void render(render::RenderManager &rManager);\n\t\t\t\tbool selector;\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.7236679196357727, "alphanum_fraction": 0.7385377883911133, "avg_line_length": 27.821428298950195, "blob_id": "4ad688c7b5afb44e06cb8de5d2dd69602d94a5df", "content_id": "fba49c6a058a714ba9bf4bb8c5aa5b7ff69db25d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2421, "license_type": "no_license", "max_line_length": 171, "num_lines": 84, "path": "/src/cpp/gui/screens/GameView.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"world/World.hpp\"\n#include \"render/RenderManager.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include \"input/Controls.hpp\"\n\n#include \"gui/ScreenManager.hpp\"\n#include \"gui/screens/MainMenu.hpp\"\n#include \"gui/screens/GamePauseMenu.hpp\"\n\n#include <glm/gtc/matrix_transform.hpp>\n\n//debug\n#include <iostream>\n\n#include \"GameView.hpp\"\n\nusing namespace world;\nusing namespace screens;\n\nGameView::GameView() {\n\tthis->world = new World(this);\n\tthis->statusFont = new render::Font(\"cour.ttf\", 22);\n\tthis->wasSurface = true;\n\tthis->pauseStartTime = 0;\n\tthis->pauseOffsetTime = 0;\n\tthis->gameCompletionState = -1;\n}\n\nGameView::~GameView() {\n\tdelete this->world;\n}\nvoid GameView::onGameOver(int type)\n{\n\tthis->gameCompletionState = type;\n}\nvoid GameView::render(util::DeltaTime &deltaTime, render::RenderManager &manager) {\n\t// Updating the world\n\tbool isSurface = this->manager->isScreenSurface(this);\n\tif(this->gameCompletionState>=0)\n\t\tisSurface = false;\n\tif(wasSurface!=isSurface)\n\t{\n\t\twasSurface = isSurface;\n\t\tif(!isSurface)\n\t\t\tpauseStartTime = deltaTime.getTime();\n\t\telse\n\t\t\tpauseOffsetTime += deltaTime.getTime()-pauseStartTime;\n\t}\n\tutil::DeltaTime dt2 = deltaTime;\n\tdt2.setOffsetTime(-pauseOffsetTime);\n\tthis->world->tick(dt2, isSurface);\n\t// Render the 3D world\n\trender::RenderManager gameRenderManager;\n\tgameRenderManager.setDimensionsPx(manager.getWidthPx(), manager.getHeightPx());\n\tgameRenderManager.setDimensionsMM(manager.getWidthMM(), 
manager.getHeightMM());\n\tgameRenderManager.P = glm::perspective(70.f, (float)(this->manager->getWidth()/this->manager->getHeight()), 0.1f, 10000.f);\n\tgameRenderManager.markPDirty();\n\tthis->world->render3D(gameRenderManager, isSurface);\n\t// Render the 2D elements\n\tthis->world->render2D(manager, isSurface);\n\n\tif(this->gameCompletionState>=0)\n\t{\n\t\tmanager.M = glm::translate(glm::mat4(1.0f), glm::vec3(manager.getWidthMM()/2-this->statusFont->getTextWidth(\"GAME OVER\\0\", manager)/4, manager.getHeightMM()/2-22/2, 0));\n\t\tmanager.markMDirty();\n\t\tthis->statusFont->printf(\"GAME OVER\", manager);\n\t}\n}\nbool GameView::onControlEvent(Control control, int action)\n{\n\tif(control==CONTROL_GUI_ESCAPE && action==CONTROL_KEYACTION_RELEASE)\n\t{\n\t\tthis->manager->openScreen(new GamePauseMenu());\n\t\treturn true;\n\t}\n\tif(this->gameCompletionState<0)\n\t\tthis->world->onDebugControl(control, action);\n\treturn true;\n}\nvoid GameView::onScreenResize() {\n}\nbool GameView::supportsCursor() {\n\treturn false;\n}\n" }, { "alpha_fraction": 0.7427138090133667, "alphanum_fraction": 0.7479448914527893, "avg_line_length": 33.31135559082031, "blob_id": "2cd7b61fbfef41508f8cf2a8e5bb19fff1015b4e", "content_id": "29eb3f1e6c8423e1fa583e991a5f315373dc4756", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 9367, "license_type": "no_license", "max_line_length": 167, "num_lines": 273, "path": "/src/cpp/gui/WindowScreenManager.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <glm/gtc/matrix_transform.hpp>\n#include \"util/gl.h\"\n#include <iostream>\n#include <string>\n#include \"gui/screens/MainMenu.hpp\"\n#include \"util/Console.hpp\"\n#include \"render/BasicShapes.hpp\"\n#include \"input/Controls.hpp\"\n#include \"render/RenderManager.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include \"util/AssetManager.hpp\"\n#include \"render/shaders/ShaderProgram.hpp\"\n\n//TODO: remove this\n#include \"render/shaders/ShaderUtils.hpp\"\n#include \"render/Font.hpp\"\n\n\nusing util::AssetManager;\n\n#include \"WindowScreenManager.hpp\"\n\nusing namespace render;\n\nGLFWwindow *superdebug_window = 0;\n\nstatic render::Font *debugRenderFont;\n\nWindowScreenManager *WindowScreenManager::eventHandler = 0;\nWindowScreenManager::WindowScreenManager() : ScreenManager()\n{\n\tthis->lastWindowWidthPx = 0;\n\tthis->lastWindowHeightPx = 0;\n\tthis->windowWidthPx = 0;\n\tthis->windowHeightPx = 0;\n\tthis->monitorWidthPx = 0;\n\tthis->monitorHeightPx = 0;\n\tthis->modeWidthPx = 0;\n\tthis->modeHeightPx = 0;\n\tthis->scale = 1;\n\tthis->lastX = 0;\n\tthis->lastY = 0;\n\tthis->supportedCursorLastX = 0;\n\tthis->supportedCursorLastY = 0;\n\tthis->unsupportedCursorLastX = 0;\n\tthis->unsupportedCursorLastY = 0;\n\tthis->supportedCursorInit = true;\n\tthis->unsupportedCursorInit = true;\n\tthis->skipNextEvent = false;\n\tthis->didSupportCursor = false;\n\n\tConsole::println(CONSOLE_TAG_MAIN, \"WindowScreenManager: [INIT] Starting\");\n\n\tWindowScreenManager::eventHandler = this;\n\tglfwSetErrorCallback(WindowScreenManager::onError);\n\t// Initialise and create window\n\tif(!glfwInit())\n\t{\n\t\tstd::cerr << \"GLFW Init failed\" << std::endl;\n\t\texit(EXIT_FAILURE);\n\t}\n\t/*glfwWindowHint(GLFW_SAMPLES, 4);\n\tglfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);\n\tglfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);\n\tglfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);\n\tglfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, 
GL_TRUE);*/\n\t// Create window\n\tthis->window = glfwCreateWindow(800, 600, \"FYP\", NULL, NULL);\n\t//TODO: remove debug\n\tsuperdebug_window = window;\n\tif(!this->window)\n\t{\n\t\tstd::cerr << \"Window creation failed\" << std::endl;\n\t\tglfwTerminate();\n\t\texit(EXIT_FAILURE);\n\t}\n\tglfwMakeContextCurrent(this->window);\n\t\n//#ifdef USE_GLEW\n\t// Setup GLEW\n\tglewExperimental = GL_TRUE;\n\tif(glewInit())\n\t{\n\t\tstd::cerr << \"GLEW initialization failed\" << std::endl;\n\t\texit(EXIT_FAILURE);\n\t}\n//#endif\n\t\n\t// Print info\n\tConsole::println(CONSOLE_TAG_MAIN, \"WindowScreenManager: [INIT] OpenGL version: \"+std::string((const char *)glGetString(GL_VERSION)));\n\tConsole::println(CONSOLE_TAG_MAIN, \"WindowScreenManager: [INIT] GLSL version: \"+std::string((const char *)glGetString(GL_SHADING_LANGUAGE_VERSION)));\n\tConsole::println(CONSOLE_TAG_MAIN, \"WindowScreenManager: [INIT] Vendor: \"+std::string((const char *)glGetString(GL_VENDOR)));\n\tConsole::println(CONSOLE_TAG_MAIN, \"WindowScreenManager: [INIT] Renderer: \"+std::string((const char *)glGetString(GL_RENDERER)));\n\t\n\t// Event handlers\n\tglfwSetKeyCallback(this->window, WindowScreenManager::onKeyEvent);\n\tglfwSetCursorPosCallback(this->window, WindowScreenManager::onCursorPosEvent);\n\tglfwSetMouseButtonCallback(this->window, WindowScreenManager::onMouseButtonEvent);\n\tglfwSetScrollCallback(this->window, WindowScreenManager::onScrollEvent);\n\t\n\trender::shaders::ShaderProgram::loadShaders();\n\tBasicShapes::init();\n\tglEnableClientState(GL_VERTEX_ARRAY);\n\tthis->openRootScreen(new MainMenu());\n\n\tutil::AssetManager::getAssetManager()->init();\n\n\tdebugRenderFont = new render::Font(\"cour.ttf\", 8);\n\tConsole::println(CONSOLE_TAG_MAIN, \"WindowScreenManager: [INIT] Started\");\n}\nWindowScreenManager::~WindowScreenManager()\n{\n\t\n}\nvoid WindowScreenManager::run()\n{\n\tConsole::println(CONSOLE_TAG_MAIN, \"WindowScreenManager: [RUN] Running\");\n\tglClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n\trender::RenderManager renderManager;\n\tutil::DeltaTime deltaTime(true, 60);\n\tAssetManager *am = AssetManager::getAssetManager();\n\tlong loop = 0;\n\twhile(!glfwWindowShouldClose(this->window))\n\t{\n\t\t// Postload when ready\n\t\tif(am!=0 && am->postload())\n\t\t{\n\t\t\tam = 0;\n\t\t\tConsole::println(CONSOLE_TAG_MAIN, \"WindowScreenManager: [RUN] Assets Loaded\");\n\t\t}\n\n\t\t// Get screen dimensions\n\t\tglfwGetFramebufferSize(this->window, &windowWidthPx, &windowHeightPx);\n\t\tif(windowWidthPx!=this->lastWindowWidthPx || windowHeightPx!=this->lastWindowHeightPx)\n\t\t{\n\t\t\tglViewport(0, 0, windowWidthPx, windowHeightPx);\n\t\t\trenderManager.setDimensionsPx(windowWidthPx, windowHeightPx);\n\t\t\tthis->lastWindowWidthPx = windowWidthPx;\n\t\t\tthis->lastWindowHeightPx = windowHeightPx;\n\t\t\tGLFWmonitor *monitor = glfwGetPrimaryMonitor();\n\t\t\tif(monitor!=0)\n\t\t\t{\n\t\t\t\tglfwGetMonitorPhysicalSize(monitor, &monitorWidthPx, &monitorHeightPx);\n\t\t\t\tconst GLFWvidmode *mode = glfwGetVideoMode(monitor);\n\t\t\t\tthis->modeWidthPx = mode->width;\n\t\t\t\tthis->modeHeightPx = mode->height;\n\t\t\t\tthis->width = windowWidthPx*monitorWidthPx/modeWidthPx/scale;\n\t\t\t\tthis->height = windowHeightPx*monitorHeightPx/modeHeightPx/scale;\n\t\t\t\trenderManager.setDimensionsMM(width, height);\n\t\t\t\trenderManager.P = glm::ortho(0.f, (float)this->width, 0.f, (float)this->height, 0.f, 
1.f);\n\t\t\t\trenderManager.markPDirty();\n\t\t\t\tthis->onScreenResize();\n\t\t\t}\n\t\t}\n\t\t\n\t\t// Calculate FPS\n\t\tdeltaTime.postTime(glfwGetTime());\n\t\t\n\t\t// Render\n\t\tglClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n\n\t\tthis->render(deltaTime, renderManager);\n\t\t\n\t\tglm::mat4 boxMat = glm::mat4(1.0f);\n\t\trenderManager.M = boxMat;\n\t\trenderManager.markMDirty();\n\t\tdebugRenderFont->printf(\"FPS: \"+std::to_string(static_cast<int>(std::round(deltaTime.getFramerate()))), renderManager);\n\n\t\t// Update frame buffer\n\t\tglfwSwapBuffers(this->window);\n\t\tglfwPollEvents();\n\t\tloop++;\n\t}\n\tConsole::println(CONSOLE_TAG_MAIN, \"WindowScreenManager: [RUN] Window Closed\");\n}\nvoid WindowScreenManager::close()\n{\n\tglfwSetWindowShouldClose(this->window, GL_TRUE);\n}\nvoid WindowScreenManager::onError(int error, const char *msg)\n{\n\tstd::cerr << msg << std::endl;\n}\nvoid WindowScreenManager::onKeyEvent(GLFWwindow *window, int key, int scancode, int action, int mode)\n{\n\tswitch(action)\n\t{\n\t\tcase GLFW_PRESS:\n\t\t\taction = CONTROL_KEYACTION_PRESS;\n\t\t\tbreak;\n\t\tcase GLFW_RELEASE:\n\t\t\taction = CONTROL_KEYACTION_RELEASE;\n\t\t\tbreak;\n\t\tcase GLFW_REPEAT:\n\t\t\taction = CONTROL_KEYACTION_REPEAT;\n\t\t\tbreak;\n\t}\n\tWindowScreenManager::eventHandler->onControlEvent(key, action);\n}\nvoid WindowScreenManager::onCursorPosEvent(GLFWwindow *window, double x, double y)\n{\n\t// GLFW fires a cursor pos event to the center of the screen every time the cursor is set to normal\n\tif(WindowScreenManager::eventHandler->skipNextEvent)\n\t{\n\t\tWindowScreenManager::eventHandler->skipNextEvent = false;\n\t\treturn;\n\t}\n\ty = WindowScreenManager::eventHandler->windowHeightPx-y;\n\tx = x*WindowScreenManager::eventHandler->monitorWidthPx/WindowScreenManager::eventHandler->modeWidthPx/WindowScreenManager::eventHandler->scale;\n\ty = y*WindowScreenManager::eventHandler->monitorHeightPx/WindowScreenManager::eventHandler->modeHeightPx/WindowScreenManager::eventHandler->scale;\n\tWindowScreenManager::eventHandler->onControlEvent(CONTROL_ACTION_MOUSE, x, y, WindowScreenManager::eventHandler->lastX-x, WindowScreenManager::eventHandler->lastY-y);\n\tWindowScreenManager::eventHandler->lastX = x;\n\tWindowScreenManager::eventHandler->lastY = y;\n}\nvoid WindowScreenManager::onMouseButtonEvent(GLFWwindow* window, int button, int action, int mods)\n{\n\tswitch(action)\n\t{\n\t\tcase GLFW_PRESS:\n\t\t\taction = CONTROL_MOUSEBUTTONACTION_PRESS;\n\t\t\tbreak;\n\t\tcase GLFW_RELEASE:\n\t\t\taction = CONTROL_MOUSEBUTTONACTION_RELEASE;;\n\t\t\tbreak;\n\t\tcase GLFW_REPEAT:\n\t\t\taction = CONTROL_MOUSEBUTTONACTION_REPEAT;\n\t\t\tbreak;\n\t}\n\tWindowScreenManager::eventHandler->onControlEvent(button, action);\n}\nvoid WindowScreenManager::onScrollEvent(GLFWwindow* window, double dx, double dy)\n{\n\tdx = dx*WindowScreenManager::eventHandler->monitorWidthPx/WindowScreenManager::eventHandler->modeWidthPx/WindowScreenManager::eventHandler->scale;\n\tdy = -dy*WindowScreenManager::eventHandler->monitorHeightPx/WindowScreenManager::eventHandler->modeHeightPx/WindowScreenManager::eventHandler->scale;\n\tWindowScreenManager::eventHandler->onControlEvent(CONTROL_ACTION_SCROLL, 0, 0, dx, dy);\n}\nvoid WindowScreenManager::onSurfaceScreenChanged(Screen *screen)\n{\n\tif(screen->supportsCursor()!=this->didSupportCursor)\n\t{\n\t\tif(screen->supportsCursor())\n\t\t{\n\t\t\tdouble x = this->supportedCursorLastX;\n\t\t\tdouble y = 
this->supportedCursorLastY;\n\t\t\tif(this->supportedCursorInit)\n\t\t\t{\n\t\t\t\tthis->supportedCursorInit = false;\n\t\t\t\tx = this->windowHeightPx/2;\n\t\t\t\ty = this->windowHeightPx/2;\n\t\t\t}\n\n\t\t\tglfwGetCursorPos(this->window, &this->unsupportedCursorLastX, &this->unsupportedCursorLastY);\n\t\t\tglfwSetInputMode(this->window, GLFW_CURSOR, GLFW_CURSOR_NORMAL);\n\t\t\tglfwSetCursorPos(this->window, x, y);\n\n\t\t\ty = this->windowHeightPx-y;\n\t\t\tx = x*this->monitorWidthPx/this->modeWidthPx/this->scale;\n\t\t\ty = y*this->monitorHeightPx/this->modeHeightPx/this->scale;\n\t\t\tthis->onControlEvent(CONTROL_ACTION_MOUSE, x, y, 0, 0);\n\t\t\tthis->skipNextEvent = true;\n\t\t\tthis->lastX = x;\n\t\t\tthis->lastY = y;\n\t\t}\n\t\telse\n\t\t{\n\t\t\tglfwGetCursorPos(this->window, &this->supportedCursorLastX, &this->supportedCursorLastY);\n\t\t\tglfwSetInputMode(this->window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);\n\t\t\tglfwSetCursorPos(this->window, this->unsupportedCursorLastX, this->unsupportedCursorLastY);\n\t\t}\n\t}\n\tScreenManager::onSurfaceScreenChanged(screen);\n\tthis->didSupportCursor = screen->supportsCursor();\n}\n" }, { "alpha_fraction": 0.727053165435791, "alphanum_fraction": 0.727053165435791, "avg_line_length": 16.25, "blob_id": "198d4d303e8e56c895330f3bf1760342d9d51efb", "content_id": "c2a27a4a6cc8a2db5e4d22e549e6cef98a393e05", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 414, "license_type": "no_license", "max_line_length": 49, "num_lines": 24, "path": "/src/cpp/input/ControlScheme.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_INPUT_CONTROLSCHEME_HPP_\n#define FYP_INPUT_CONTROLSCHEME_HPP_\n\nnamespace util {\n\tclass DeltaTime;\n}\nnamespace world {\n\tclass Entity;\n}\n\n#include <glm/glm.hpp>\n\nnamespace controls {\n\tclass ControlScheme {\n\t\tprotected:\n\t\t\tworld::Entity *controlEntity;\n\t\tpublic:\n\t\t\tControlScheme(world::Entity *controlEntity);\n\t\t\tvirtual ~ControlScheme();\n\t\t\tvirtual void tick(util::DeltaTime &deltaTime);\n\t};\n};\n\n#endif\n" }, { "alpha_fraction": 0.7237769365310669, "alphanum_fraction": 0.7364970445632935, "avg_line_length": 40.209678649902344, "blob_id": "772137417e21fe9eacb0e196104ea332b76ac8cb", "content_id": "5fae011d498dd316e46637f3574d992e522967f5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 10220, "license_type": "no_license", "max_line_length": 206, "num_lines": 248, "path": "/tmp/StaticMesh.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"tribox2.h\"\n#include \"render/StaticModel.hpp\"\n\n//debug\n#include \"render/shaders/ShaderProgram.hpp\"\n#include <iostream>\n\n#include \"StaticMesh.hpp\"\n\nusing namespace render;\nusing namespace world::collisions;\nusing namespace util::Boundaries;\n\n#define CELL_MAX_LEVEL 8\n#define CELL_MAX_TRIANGLES_PER_CELL 1\n\nStaticMesh::StaticMesh()\n{\n\t\n}\nStaticMesh::~StaticMesh()\n{\n\t\n}\n// Adding a static model to the static mesh collision\nvoid StaticMesh::addStaticModel(render::StaticModel *model)\n{\n\tint objectCount = 0;\n\tStaticMeshTriangle triangle;\n\ttriangle.model = model;\n\tthis->rootNode.bounds = &model->getBounds();\n\tthis->rootNode.level = 0;\n\tthis->rootNode.leaf = true;\n\tthis->rootNode.children.clear();\n\tthis->rootNode.triangles.clear();\n\tfor(render::StaticModelObject *oj : model->objects)\n\t{\n\t\ttriangle.modelObject = oj;\n\t\tfor(int 
i=0;i<oj->numPrimitives;i++)\n\t\t{\n\t\t\t//std::cout << \"Begin Triangle\" << std::endl;\n\t\t\ttriangle.triangleIndex = i;\n\t\t\taddTriangleToCell(triangle, this->rootNode, 0);\n\t\t}\n\t\tobjectCount++;\n\t}\n}\nvoid StaticMesh::addTriangleToCell(StaticMeshTriangle &triangle, StaticMeshCell &cell, int cellLevel)\n{\n\tif(cell.leaf)\n\t{\n\t\tcell.triangles.push_back(triangle);\n\t\tif(cell.triangles.size()>CELL_MAX_TRIANGLES_PER_CELL && cell.level<=CELL_MAX_LEVEL)\n\t\t{\n\t\t\t//std::cout << \"\\t\" << cellLevel << \":S cellTriangleCount=\" << cellTriangleCount << std::endl;\n\t\t\t// Add triangles from this ex-leaf cell to child cells\n\t\t\tcell.leaf = false;\n\t\t\tsubdivideCell(cell, cellLevel);\n\t\t\tfor(auto tri : cell.triangles)\n\t\t\t{\n\t\t\t\taddTriangleToCellChildren(tri, cell, cellLevel+1);\n\t\t\t}\n\t\t\t// Non leaf nodes don't need triangles in them\n\t\t\tcell.triangles.clear();\n\t\t}\n\t}\n\telse\n\t{\n\t\t//std::cout << \"\\t\" << cellLevel << \":N cellTriangleCount=\" << cellTriangleCount << std::endl;\n\t\taddTriangleToCellChildren(triangle, cell, cellLevel+1);\n\t}\n\t//std::cout << \"\\t\" << cellLevel << \":T cellTriangleCount=\" << cell.triangles.size() << std::endl;\n}\nvoid StaticMesh::addTriangleToCellChildren(StaticMeshTriangle &triangle, StaticMeshCell &parent, int childrenLevel)\n{\n\tfor(StaticMeshCell &child : parent.children)\n\t{\n\t\t// Get the vertecies of the triangle\n\t\tGLfloat *vertexAPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+0]];\n\t\tGLfloat *vertexBPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+1]];\n\t\tGLfloat *vertexCPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+2]];\n\t\tif(triBoxOverlap(child.bounds->boxCenter, child.bounds->boxHalfSize, vertexAPosition, vertexBPosition, vertexCPosition))\n\t\t{\n\t\t\tthis->addTriangleToCell(triangle, child, childrenLevel);\n\t\t}\n\t}\n}\n// Cell subdivision functions\nvoid StaticMesh::subdivideCell(StaticMeshCell &parent, int childrenLevel)\n{\n\t//std::cout << \"subdivide: Level:\" << childrenLevel << std::endl;\n#define X 0\n#define Y 1\n#define Z 2\n\t// Determine which should be split\n\tbool splitX = parent.bounds->boxHalfSize[X]*2>parent.bounds->boxHalfSize[Y] || parent.bounds->boxHalfSize[X]*2>parent.bounds->boxHalfSize[Z];\n\tbool splitY = parent.bounds->boxHalfSize[Y]*2>parent.bounds->boxHalfSize[X] || parent.bounds->boxHalfSize[Y]*2>parent.bounds->boxHalfSize[Z];\n\tbool splitZ = parent.bounds->boxHalfSize[Z]*2>parent.bounds->boxHalfSize[X] || parent.bounds->boxHalfSize[Z]*2>parent.bounds->boxHalfSize[Y];\n\t\n\t// Calculate the sizes of the new cells\n\tfloat boxSplitHalfSize[] = {\n\t\tsplitX ? parent.bounds->boxHalfSize[X]/2 : parent.bounds->boxHalfSize[X],\n\t\tsplitY ? parent.bounds->boxHalfSize[Y]/2 : parent.bounds->boxHalfSize[Y],\n\t\tsplitZ ? parent.bounds->boxHalfSize[Z]/2 : parent.bounds->boxHalfSize[Z]\n\t};\n\t// Calculate the centers of all the lower set of cells\n\tfloat boxSplitCenter1[] = {\n\t\tsplitX ? parent.bounds->boxCenter[X]-parent.bounds->boxHalfSize[X]/2: parent.bounds->boxCenter[X],\n\t\tsplitY ? parent.bounds->boxCenter[Y]-parent.bounds->boxHalfSize[Y]/2: parent.bounds->boxCenter[Y],\n\t\tsplitZ ? 
parent.bounds->boxCenter[Z]-parent.bounds->boxHalfSize[Z]/2: parent.bounds->boxCenter[Z],\n\t};\n\t// Calculate the centers of all the upper set of cells\n\tfloat boxSplitCenter2[] = {\n\t\tboxSplitCenter1[X]+parent.bounds->boxHalfSize[X],\n\t\tboxSplitCenter1[Y]+parent.bounds->boxHalfSize[Y],\n\t\tboxSplitCenter1[Z]+parent.bounds->boxHalfSize[Z]\n\t};\n\t\n\t\t\t\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter1[X], boxSplitCenter1[Y], boxSplitCenter1[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitZ)\t\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter1[X], boxSplitCenter1[Y], boxSplitCenter2[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitY)\t\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter1[X], boxSplitCenter2[Y], boxSplitCenter1[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitY && splitZ)\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter1[X], boxSplitCenter2[Y], boxSplitCenter2[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitX)\t\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter2[X], boxSplitCenter1[Y], boxSplitCenter1[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitX && splitZ)\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter2[X], boxSplitCenter1[Y], boxSplitCenter2[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitX && splitY)\t\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter2[X], boxSplitCenter2[Y], boxSplitCenter1[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n\tif(splitX && splitY && splitZ)\t\taddChildToCell(parent, childrenLevel, new AABB(boxSplitCenter2[X], boxSplitCenter2[Y], boxSplitCenter2[Z], boxSplitHalfSize[X], boxSplitHalfSize[Y], boxSplitHalfSize[Z]));\n}\nvoid StaticMesh::addChildToCell(StaticMeshCell &parent, int childLevel, AABB *boxBounds)\n{\n\tStaticMeshCell cell;\n\tcell.bounds = boxBounds;\n\tcell.level = childLevel;\n\tcell.leaf = true;\n\tcell.children.clear();\n\tcell.triangles.clear();\n\tparent.children.push_back(cell);\n}\n\n\n//## Debug rendering functions\nvoid StaticMesh::render(render::RenderManager &rManager, AABB &selectionBox)\n{\n\t//render::shaders::ShaderProgram *proc = rManager.useShader(SHADER_solidColor);\n\t//render::BasicShapes::renderUnitCube(proc->getShaderLocation(false, SHADERVAR_vertex_position));\n\t//this->rootNode.bounds->render(rManager, glm::vec4(0.f, 1.f, 0.f, 1.0f), false);\n\tthis->renderCellChildren(rManager, selectionBox, &this->rootNode);\n}\nvoid StaticMesh::renderCellChildren(render::RenderManager &rManager, AABB &selectionBox, StaticMeshCell *cell)\n{\n//std::cout << \"RENDE\" << std::endl;\n\tif(!selectionBox.checkIntersect(*cell->bounds))\n\t\treturn;\n\tcell->bounds->render(rManager, glm::vec4(0.f, 1.f, (float)cell->level/CELL_MAX_LEVEL, 1.0f), false);\n\tfor(StaticMeshCell &c : cell->children)\n\t{\n\t\tthis->renderCellChildren(rManager, selectionBox, &c);\n\t}\n}\n\n\n//##Raycasting functions\nRaycastResult *StaticMesh::rayCast(Raycast &raycast)\n{\n\tRaycastResult *result = new RaycastResult;\n\tresult->distance = std::numeric_limits<float>::max();\n\tthis->rayCastCellChildren(raycast, this->rootNode, *result);\n\tif(result->distance<0 || result->distance==std::numeric_limits<float>::max())\n\t\treturn 0;\n\treturn result;\n}\nStaticMeshTriangle *StaticMesh::rayCastCellChildren(Raycast 
&raycast, StaticMeshCell &cell, RaycastResult &result)\n{\n\tStaticMeshTriangle *resultTriangle = 0,*triangleTmp=0;\n\tfloat cellDistance;\n\tfor(StaticMeshCell &cell : cell.children)\n\t{\n\t\tcellDistance = cell.bounds->rayCastDistance(raycast);\n\t\t//if(cellDistance!=-1 && cellDistance<=result.distance)\n\t\tif(cellDistance!=-1)\n\t\t{\n\t\t\tif(cell.leaf)\n\t\t\t\ttriangleTmp = this->rayCastCellTriangles(raycast, cell, result);\n\t\t\telse\n\t\t\t\ttriangleTmp = this->rayCastCellChildren(raycast, cell, result);\n\t\t\tif(triangleTmp!=0)\n\t\t\t\tresultTriangle = triangleTmp;\n\t\t}\n\t}\n\treturn resultTriangle;\n}\nStaticMeshTriangle *StaticMesh::rayCastCellTriangles(Raycast &raycast, StaticMeshCell &cell, RaycastResult &result)\n{\n\tStaticMeshTriangle *resultTriangle = 0;\n\tglm::vec3 plainDirection[2],pvec,tvec,qvec;\n\tfloat det,t,u,v;\n\tbool invert = false;\n\tfor(StaticMeshTriangle &triangle : cell.triangles)\n\t{\n\t\tGLfloat *vertexAPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+0]];\n\t\tGLfloat *vertexBPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+1]];\n\t\tGLfloat *vertexCPosition = &triangle.model->dataBuffer[triangle.model->dataBufferStride*triangle.modelObject->indecies[triangle.triangleIndex*3+2]];\n\t\tplainDirection[0].x = vertexBPosition[0]-vertexAPosition[0];\n\t\tplainDirection[0].y = vertexBPosition[1]-vertexAPosition[1];\n\t\tplainDirection[0].z = vertexBPosition[2]-vertexAPosition[2];\n\t\tplainDirection[1].x = vertexCPosition[0]-vertexAPosition[0];\n\t\tplainDirection[1].y = vertexCPosition[1]-vertexAPosition[1];\n\t\tplainDirection[1].z = vertexCPosition[2]-vertexAPosition[2];\n\t\tpvec = glm::cross(raycast.rayDirection, plainDirection[1]);\n\t\tdet = glm::dot(plainDirection[0], pvec);\n\t\t//result.distance = det;\n\t\tif(det>0.00001 && det<0.00001)//If ray is parallel(ish) to the plane\n\t\t{\n\t\t\tcontinue;\n\t\t}\n\t\ttvec.x = raycast.rayOrigin.x-vertexAPosition[0];\n\t\ttvec.y = raycast.rayOrigin.y-vertexAPosition[1];\n\t\ttvec.z = raycast.rayOrigin.z-vertexAPosition[2];\n\t\tqvec = glm::cross(tvec, plainDirection[0]);\n\t\tu = glm::dot(tvec, pvec);\n\t\tv = glm::dot(raycast.rayDirection, qvec);\n\t\tt = glm::dot(plainDirection[1], qvec)/det;\n\t\tif(det<0)\n\t\t{\n\t\t\tdet = -det;\n\t\t\tu = -u;\n\t\t\tv = -v;\n\t\t\tinvert = true;\n\t\t}\n\t\tif(u<0 || u>det)\n\t\t{\n\t\t\tcontinue;\n\t\t}\n\t\tif(v<0 || u+v>det)\n\t\t{\n\t\t\tcontinue;\n\t\t}\n\t\tif(t<result.distance)\n\t\t{\n\t\t\tresult.distance = t;\n\t\t\tif(invert)\n\t\t\t\tresult.hitNormal = glm::normalize(glm::cross(plainDirection[1], plainDirection[0]));\n\t\t\telse\n\t\t\t\tresult.hitNormal = glm::normalize(glm::cross(plainDirection[0], plainDirection[1]));\n\t\t\tresultTriangle = &triangle;\n\t\t}\n\t}\n\treturn resultTriangle;\n}\n" }, { "alpha_fraction": 0.6968221068382263, "alphanum_fraction": 0.7121317386627197, "avg_line_length": 30.2391300201416, "blob_id": "6a56bb278da1ecc3b67a83bee65aa4fef9158cd3", "content_id": "38f72be1f819298412b6f3b140e53193528ca88d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4311, "license_type": "no_license", "max_line_length": 156, "num_lines": 138, "path": "/src/cpp/ai/PerceptionManager.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"world/Entity.hpp\"\n#include \"world/World.hpp\"\n#include 
\"util/Boundaries.hpp\"\n\n#include \"PerceptionManager.hpp\"\n\nusing namespace ai;\nusing namespace world;\nusing namespace util::Boundaries;\n\nPerceptionManager::PerceptionManager(Entity *controlEntity, std::initializer_list<const std::type_info *> searchTypes, glm::vec3 offset_modelSpace)\n{\n\tthis->currentYaw = 0.f;\n\tthis->currentPitch = 0.f;\n\tthis->targetYaw = 0.f;\n\tthis->targetPitch = 0.f;\n\tthis->targetEntity = 0;\n\tthis->offset_modelSpace = offset_modelSpace;\n\tthis->controlEntity = controlEntity;\n\tthis->searchTypes.insert(searchTypes.begin(), searchTypes.end());\n\tthis->minYaw = -M_PI;\n\tthis->maxYaw = M_PI;\n\n\tthis->targetEntity = 0;\n\tthis->perceivedEntity = 0;\n}\nPerceptionManager::~PerceptionManager()\n{\n\t\n}\nvoid PerceptionManager::setYawBounds(float minYaw, float maxYaw)\n{\n\tthis->minYaw = minYaw;\n\tthis->maxYaw = maxYaw;\n}\nextern glm::vec3 debug_point2,debug_point3;\nvoid PerceptionManager::tick(util::DeltaTime &deltaTime)\n{\n\tRaycast ray;\n\tglm::vec3 targetOrigin = this->controlEntity->getPosition()+(this->controlEntity->getOrientation()*this->offset_modelSpace);\n\tray.origin = targetOrigin;\n\tglm::vec3 targetDirection;\n\tthis->targetEntity = 0;\n\tthis->perceivedEntity = 0;\n\tEntity *e2;\n\tfor(Entity *e : *this->controlEntity->getWorld().getEntities())\n\t{\n\t\tAABB *bounds = e->getBounds();\n\t\tif(bounds!=0 && this->searchTypes.find(&typeid(*e))!=this->searchTypes.end())\n\t\t{\n\t\t\tAABB ebounds = bounds->translate(e->getPosition());\n\t\t\tglm::vec3 min = ebounds.min();\n\t\t\tglm::vec3 max = ebounds.max();\n\t\t\tfor(int i=0;i<8;i++)\n\t\t\t{\n\t\t\t\tglm::vec3 tmp = glm::vec3(\n\t\t\t\t\t\ti&1 ? min[0] : max[0],\n\t\t\t\t\t\ti&2 ? min[1] : max[1],\n\t\t\t\t\t\ti&4 ? min[2] : max[2]\n\t\t\t\t\t);\n\t\t\t\tray.direction = glm::normalize((tmp-ray.origin));\n\t\t\t\tray.maxDistance = 0.f;\n\t\t\t\te2 = this->controlEntity;\n\t\t\t\tRaycastResult result = this->controlEntity->getWorld().rayCast(ray, &e2);\n\t\t\t\tdebug_point3 = ray.origin+ray.direction*result.distance;\n\t\t\t\tif(e2==e)\n\t\t\t\t{\n\t\t\t\t\ttargetDirection = (((min+max)*0.5f)-ray.origin)*this->controlEntity->getOrientation();\n\t\t\t\t\ttargetDistance = result.distance;\n\t\t\t\t\tthis->targetEntity = e;\n\t\t\t\t\tbreak;//Stop checking this ent, we can already see it\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif(this->targetEntity!=0)\n\t{\n\t\tthis->targetYaw = -std::atan2(targetDirection.x, targetDirection.y);//Rotation around Z\n\t\tthis->targetPitch = -std::atan2(std::sqrt(targetDirection.x*targetDirection.x+targetDirection.y*targetDirection.y), targetDirection.z)+glm::radians(90.f);\n\t\tif(this->targetYaw<minYaw)\n\t\t\tthis->targetYaw = minYaw;\n\t\telse if(this->targetYaw>maxYaw)\n\t\t\tthis->targetYaw = maxYaw;\n\t\tif(this->targetPitch<-0.4f)\n\t\t\tthis->targetPitch = -0.4f;\n\t}\n#define TURRET_YAW_RESOLUTION 0.05f\n#define TURRET_PITCH_RESOLUTION 0.001\n\tfloat targetYawRelative = this->targetYaw-this->currentYaw;//Relative to current\n\tif(targetYawRelative>M_PI)\n\t\ttargetYawRelative -= M_PI*2.f;\n\tif(targetYawRelative<-M_PI)\n\t\ttargetYawRelative += M_PI*2.f;\n\tfloat targetPitchRelative = this->targetPitch-this->currentPitch;\n\tbool perceive = true;\n\tif(std::fabs(targetYawRelative)>TURRET_YAW_RESOLUTION)\n\t{\n\t\tthis->currentYaw += targetYawRelative>0 ? 
TURRET_YAW_RESOLUTION : -TURRET_YAW_RESOLUTION;\n\t\tif(this->currentYaw>M_PI)\n\t\t\tthis->currentYaw -= M_PI*2.f;\n\t\tif(this->currentYaw<-M_PI)\n\t\t\tthis->currentYaw += M_PI*2.f;\n\t\tperceive = false;\n\t}\n\tif(std::fabs(targetPitchRelative)>TURRET_PITCH_RESOLUTION)\n\t{\n\t\t//float dPitch = this->targetPitch-this->currentPitch;\n\t\t//this->currentPitch += dPitch/10;\n\t\tthis->currentPitch += targetPitchRelative>0 ? TURRET_PITCH_RESOLUTION : -TURRET_PITCH_RESOLUTION;\n\t\tperceive = false;\n\t}\n\tif(perceive)\n\t\tthis->perceivedEntity = this->targetEntity;\n}\nworld::Entity *PerceptionManager::getOriginEntity()\n{\n\treturn this->controlEntity;\n}\nworld::Entity *PerceptionManager::getTargetEntity()\n{\n\treturn this->targetEntity;\n}\nworld::Entity *PerceptionManager::getPerceivedEntity()\n{\n\treturn this->perceivedEntity;\n}\nglm::vec3 PerceptionManager::getEyePosition()\n{\n\treturn this->controlEntity->getPosition()+(this->controlEntity->getOrientation()*this->offset_modelSpace);\n}\nfloat PerceptionManager::getTargetDistance()\n{\n\treturn this->targetDistance;\n}\nglm::quat PerceptionManager::getOrientation()\n{\n\treturn glm::quat(glm::vec3(this->currentPitch, 0, this->currentYaw));\n}\n" }, { "alpha_fraction": 0.6808510422706604, "alphanum_fraction": 0.6838905811309814, "avg_line_length": 17.27777862548828, "blob_id": "a06bf020d873245d999b4c32bf1f34f56ebde497", "content_id": "d5baab84f919053a49e9a676cf66945c628c369e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 329, "license_type": "no_license", "max_line_length": 65, "num_lines": 18, "path": "/src/cpp/world/entities/Player.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_WORLD_ENTITIES_PLAYER_HPP_\n#define FYP_WORLD_ENTITIES_PLAYER_HPP_\n\n#include \"world/Entity.hpp\"\n\nnamespace world {\n\tnamespace entities {\n\t\ttypedef Entity super;\n\t\tclass Player : public Entity {\n\t\t\tpublic:\n\t\t\t\tPlayer();\n\t\t\t\t~Player();\n\t\t\t\tvirtual void die(double time, glm::vec3 direction, int type);\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.6855894923210144, "alphanum_fraction": 0.7117903828620911, "avg_line_length": 16.615385055541992, "blob_id": "3d09eb5b84297d22366c0d5ce9fee980edf8781a", "content_id": "fac2b8070239754771d4ff27c1ce50293b7061e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 229, "license_type": "no_license", "max_line_length": 56, "num_lines": 13, "path": "/src/cpp/render/shaders/code/fontvertex.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//vertex\n\nimport in vec3 vertex_position;\nimport in vec2 vertex_texture;\n\nout vec2 uvCoord;\n\nimport uniform mat4 matrix_MVP;\n\nvoid main() {\n gl_Position = matrix_MVP * vec4(vertex_position, 1);\n uvCoord = vertex_texture;\n}\n" }, { "alpha_fraction": 0.7027778029441833, "alphanum_fraction": 0.7027778029441833, "avg_line_length": 22.225807189941406, "blob_id": "85b1f8b0eef33c01173df44cbfdd6c6487ff909d", "content_id": "a30e3fb0f219ab004c24594f05127db45f6c3c24", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 720, "license_type": "no_license", "max_line_length": 88, "num_lines": 31, "path": "/src/cpp/ai/path/PathExecuter.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_PATH_PATHEXECUTER_HPP_\n#define FYP_AI_PATH_PATHEXECUTER_HPP_\n\n#include <vector>\n#include 
\"world/Entity.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include \"ai/path/NavigationGraph.hpp\"\n\nnamespace ai {\n\tnamespace path {\n\t\tclass PathExecuter {\n\t\t\tprivate:\n\t\t\t\tstd::vector<int> currentPath;\n\t\t\t\tfloat *distances;\n\t\t\t\tfloat *timeToNextNode;\n\t\t\t\tfloat *nodeTime;\n\t\t\t\tworld::Entity *ent;\n\t\t\t\tlong currentPathNode;\n\t\t\t\tNavigationGraph *navgraph;\n\t\t\t\tfloat speed;\n\t\t\t\tdouble pathStartTime;\n\t\t\tpublic:\n\t\t\t\tPathExecuter(float movementSpeed, world::Entity *entity, NavigationGraph *navgraph);\n\t\t\t\t~PathExecuter();\n\t\t\t\tvoid postPath(const std::vector<int> &path);\n\t\t\t\tvoid tick(util::DeltaTime &deltaTime);\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.6719492673873901, "alphanum_fraction": 0.6719492673873901, "avg_line_length": 20.03333282470703, "blob_id": "df95ef1fa765efcb5d6874147e7597a08075bb12", "content_id": "34e544b44b85f42272821c6479002b68fe36967b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1262, "license_type": "no_license", "max_line_length": 85, "num_lines": 60, "path": "/src/cpp/gui/Element.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_ELEMENT_HPP_\n#define FYP_GUI_ELEMENT_HPP_\nnamespace render {\nclass RenderManager;\n} /* namespace render */\nnamespace util {\nclass DeltaTime;\n} /* namespace util */\n\nclass Screen;\n\n#include <glm/glm.hpp>\n\nclass Element {\n\tprivate:\n\tprotected:\n\t\tfloat x, y, width, height;\n\tpublic:\n\t\tScreen *screen;\n\t\tElement *next,*prev,*left,*right,*up,*down;\n\t\tbool selected;\n\t\t\n\t\tElement();\n\t\tvirtual ~Element();\n\t\t\n\t\tfloat getX();\n\t\tfloat getY();\n\t\tfloat getWidth();\n\t\tfloat getHeight();\n\t\tElement *setX(float x);\n\t\tElement *setY(float y);\n\t\tElement *setWidth(float width);\n\t\tElement *setHeight(float height);\n\t\tElement *setSize(float x, float y, float width, float height);\n\t\tbool isInside(float x, float y);\n\t\t\n\t\t/**\n\t\t * Renders the GUI screen\n\t\t */\n\t\tvirtual void render(util::DeltaTime &deltaTime, render::RenderManager &manager);\n\t\t/**\n\t\t * Called by resize methods\n\t\t */\n\t\tvirtual void onElementResize();\n\t\t\n\t\t/**\n\t\t * On control boolean update\n\t\t */\n\t\tvirtual bool onControlEvent(int control, int action);\n\t\t/**\n\t\t * On control delta update\n\t\t */\n\t\tvirtual bool onControlEvent(int control, double x, double y, double dx, double dy);\n\t\t/**\n\t\t * Called by the screen manager whenever the screen resizes\n\t\t */\n\t\tvirtual void onScreenResize();\n};\n\n#endif\n" }, { "alpha_fraction": 0.7516737580299377, "alphanum_fraction": 0.7571515440940857, "avg_line_length": 28.872726440429688, "blob_id": "397d102d82263f4e0889ef8c7c1b13cfe49a5f56", "content_id": "cb8eef0d7f3b1d8c2d2f3232697ed62b28aa2232", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1643, "license_type": "no_license", "max_line_length": 116, "num_lines": 55, "path": "/src/cpp/ai/ObjectiveManager.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_OBJECTIVEMANAGER_HPP_\n#define FYP_AI_OBJECTIVEMANAGER_HPP_\n\n#define OBJECTIVE_WAITING 0\n#define OBJECTIVE_LOCATE_ARTEFACT 1\n#define OBJECTIVE_COLLECT_ATREFACT 2\n#define OBJECTIVE_EXIT_MUSEUM 3\n\n#define OBJECTIVE_ATTACK_PLAYER 4\n#define OBJECTIVE_ATTACK_TURRET 5\n\n#define OBJECTIVE_WAITING_WAIT_TIME 2\n#define OBJECTIVE_COLLECT_ARTEFACT_WAIT_TIME 
4\n\n#include \"ai/path/PathExecuter.hpp\"\n#include \"ai/path/PathFinder.hpp\"\n#include \"ai/path/PathExplorer.hpp\"\n#include \"ai/AimBot.hpp\"\n#include \"world/entities/Enemy.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include \"world/entities/Turret.hpp\"\n#include <set>\n#include <unordered_set>\n\nnamespace ai {\n\tclass ObjectiveManager {\n\t\tprivate:\n\t\t\tint currentObjective;\n\t\t\tdouble artefactCollectionStartTime;\n\t\t\tPathNode *entranceNode,*artefactNode;\n\t\t\tworld::World *world;\n\t\t\tworld::entities::Enemy *enemy;\n\t\t\tworld::entities::Turret *targetTurret;\n\t\t\tai::path::PathExecuter *pathExecuter;\n\t\t\tai::path::PathExplorer *pathExplorer;\n\t\t\tai::path::PathFinder *pathFinder;\n\t\t\tai::path::NavigationGraph *navigationGraph;\n\t\t\tai::PerceptionManager *perception;\n\t\t\tai::AimBot *aimBot;\n\t\t\tglm::vec3 lastKnownPlayerPosition;\n\t\t\tPathNode *targetNode;\n\t\t\tfloat targetRadius;\n\t\t\tbool collectedArtefact;\n\t\t\tstd::set<int> visitedExitNodes;\n\t\t\tstd::unordered_set<world::entities::Turret *> knownTurrets;\n\t\tpublic:\n\t\t\tObjectiveManager(world::World *world, world::entities::Enemy *enemy, ai::path::NavigationGraph *navigationGraph);\n\t\t\t~ObjectiveManager();\n\t\t\tvoid tick(util::DeltaTime &deltaTime);\n\t\t\tvoid render(render::RenderManager &rManager);\n\t\t\tvoid perceptionCheck(util::DeltaTime &deltaTime);\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.7194244861602783, "alphanum_fraction": 0.7218225598335266, "avg_line_length": 22.828571319580078, "blob_id": "265fa14726cf0b238b9a8f015312572223c3167a", "content_id": "8adea8a97b129a0a01f0cb22df0ef8c483f68b09", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 834, "license_type": "no_license", "max_line_length": 63, "num_lines": 35, "path": "/src/cpp/ai/AimBot.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_AIMBOT_HPP_\n#define FYP_AI_AIMBOT_HPP_\n\n#include <list>\n#include \"ai/PerceptionManager.hpp\"\n#include \"world/Entity.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include \"render/RenderManager.hpp\"\n\nnamespace ai {\n\tstruct Bullet {\n\t\tdouble startTime;\n\t\tglm::vec3 start;\n\t\tglm::vec3 end;\n\t};\n\tclass AimBot {\n\t\tprivate:\n\t\t\tai::PerceptionManager *perception;\n\t\t\tstd::list<Bullet *> bullets;\n\t\t\tdouble\n\t\t\t\tbulletSpread,//radius spread\n\t\t\t\tbulletLifespan,//seconds bullets last\n\t\t\t\tbulletFireDelay,//seconds between bullets\n\t\t\t\tlastBulletFiredTime,//seconds since last bullets were fired\n\t\t\t\tperceptionFireDelay,//second between perceiving and firing\n\t\t\t\t\ttmp;\n\t\tpublic:\n\t\t\tAimBot(ai::PerceptionManager *perception);\n\t\t\t~AimBot();\n\t\t\tvoid tick(util::DeltaTime &deltaTime);\n\t\t\tvoid render(render::RenderManager &rManager);\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.6088379621505737, "alphanum_fraction": 0.655482828617096, "avg_line_length": 23.93877601623535, "blob_id": "74433e6b4992caf39fb26f780b7c3e8a988ee4bc", "content_id": "b9b78004ff36700b6120e0a6acdecda81b14977d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1222, "license_type": "no_license", "max_line_length": 107, "num_lines": 49, "path": "/src/cpp/util/QuaternionUtils.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/gl.h\"\n#include <glm/glm.hpp>\n#include <glm/gtc/quaternion.hpp>\n#include <glm/gtx/projection.hpp>\n#include 
<glm/gtx/norm.hpp>\n\n#include \"QuaternionUtils.hpp\"\n\nusing namespace util;\nusing namespace glm;\n\n//CITE: http://lolengine.net/blog/2013/09/18/beautiful-maths-quaternion-from-vectors - 22nd June 2016\nquat util::QuaternionUtils::rotationBetweenVectors(const vec3 &u, const vec3 &v)\n{\n\tvec3 w = cross(u, v);\n\tquat q = quat(dot(u, v), w.x, w.y, w.z);\n\tq.w += length(q);\n\treturn normalize(q);\n}\nquat util::QuaternionUtils::rotationBetween(const vec3 &v0, const vec3 &u0, const vec3 &v2, const vec3 &u2)\n{\n\t//up,forward = \n\tquat q2 = rotationBetweenVectors(u0, u2);\n\n\tvec3 v1 = rotate(v2, q2);\n\tvec3 v0_proj = proj(v0, u0);\n\tvec3 v1_proj = proj(v1, u0);\n\tquat q1 = rotationBetweenVectors(v0_proj, v1_proj);\n\n\treturn normalize(q2*q1);\n}\nvec3 util::QuaternionUtils::rotate(vec3 v, quat q)\n{\n\tglm::vec3 qv = glm::vec3(q.x, q.y, q.z);\n\tglm::vec3 t = 2.f * cross(glm::vec3(q.x, q.y, q.z), v);\n\treturn v + q.w * t + cross(qv, t);\n}\nvoid util::QuaternionUtils::calculateQuaternionW(glm::quat &q)\n{\n\tfloat t = 1.0f - (q.x*q.x)-(q.y*q.y)-(q.z*q.z);\n\tif(t<0.0f)\n\t{\n\t\tq.w = 0.f;\n\t}\n\telse\n\t{\n\t\tq.w = -std::sqrt(t);\n\t}\n}\n" }, { "alpha_fraction": 0.7142857313156128, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 9.5, "blob_id": "6d8176afcc45aff6cb163d81908c8bfc5d2f236c", "content_id": "e43ce40f0d6d76e0d7841e3da94cd37fddbdfaed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 105, "license_type": "no_license", "max_line_length": 31, "num_lines": 10, "path": "/src/cpp/ai/ActionPlanner.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"ActionPlanner.hpp\"\n\nActionPlanner::ActionPlanner()\n{\n\t\n}\nActionPlanner::~ActionPlanner()\n{\n\t\n}\n" }, { "alpha_fraction": 0.7181817889213562, "alphanum_fraction": 0.7454545497894287, "avg_line_length": 19, "blob_id": "9e7ff3cf7ea9ed49666a0a2fdbe5d138f583dfc1", "content_id": "546d81763bd96293ea00161701792c3f16bfed6a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 220, "license_type": "no_license", "max_line_length": 74, "num_lines": 11, "path": "/src/cpp/render/shaders/code/fontfragment.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//fragment\n\nin vec2 uvCoord;\nimport uniform vec4 textColor;\nimport(material_map_Kd) uniform sampler2D texture;\n\nout vec4 color;\n\nvoid main() {\n\tcolor = vec4(textColor.rgb, texture2D(texture, uvCoord).r * textColor.a);\n}\n" }, { "alpha_fraction": 0.5435844659805298, "alphanum_fraction": 0.5504414439201355, "avg_line_length": 44.93307113647461, "blob_id": "46baff4dddbf31c3f187bb9abfb54e9721bbebd2", "content_id": "9f45ab116a2aa2b507c6bf75d5949a6ab1344949", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11667, "license_type": "no_license", "max_line_length": 335, "num_lines": 254, "path": "/hooks/shaders_convert.py", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport os,re,sys\n\ndef loadShaderPrograms(shader_programsfile):\n shaders = []\n with open(shader_programsfile, \"r\") as fp:\n for line in fp:\n line = line[:-1]\n colonpos = line.find(\":\")\n shaderprogram_name = line[0:colonpos]\n shaders.append({'name':shaderprogram_name, 'shaders':line[colonpos+1:].split(\",\")})\n return shaders\n\ndef loadVariableNames(variables_file, variables_outfile):\n variables = []\n with 
open(variables_file, \"r\") as fp:\n for line in fp:\n line = line[:-1]\n if line.find(\"//\")!=0:\n variables.append(line)\n shaderNames = \"\"\n with open(variables_outfile, \"w\") as fp:\n for i in range(len(variables)):\n print(\"#define SHADERVAR_\"+variables[i]+\" \"+str(i), file=fp)\n shaderNames += \", \\\"\"+variables[i]+\"\\\"\"\n print(\"#define SHADERVAR_COUNT \"+str(len(variables)), file=fp)\n print(\"#define SHADERVAR_NAMES \"+shaderNames[2:], file=fp)\n return variables\n\ndef loadShader(shader_folder, shader_filename, is_include=False, macros={}):\n shader_type = -1\n shader_code = \"\"\n imports = []\n linecount = 0\n with open(shader_folder+shader_filename, \"r\") as fp:\n for line in fp:\n line = line[:-1]\n linecount += 1\n # Check the type\n if not is_include:\n if line.find(\"//vertex\")==0:\n shader_type = 0\n shader_code = \"#version 330\\\\n\"\n continue\n if line.find(\"//fragment\")==0:\n shader_type = 1\n shader_code = \"#version 330\\\\n\"\n continue\n # Remove comments\n line = re.sub(\"//.*$\", \"\", line)\n # define\n m = re.search(\"#define ([^\\s]+)( (.+))?\", line)\n if m:\n if len(m.groups())==3:\n macros[m.group(1)] = m.group(3)\n else:\n macros[m.group(1)] = True\n line = \"\"\n # import\n m = re.search(\"#include \\\"(.+)\\\"\", line)\n if m:\n includefile = m.group(1)\n shader = loadShader(shader_folder, includefile, is_include=True, macros=macros)\n line = line.replace(\"#include \\\"\"+includefile+\"\\\"\", shader['code'])\n macros = {**macros, **shader[\"macros\"]}\n imports += shader[\"imports\"]\n # Trim\n line = line.strip()\n # Remove blank lines\n if re.search(\"^\\s*$\", line):\n continue\n # Macros->ifdef/ifndef\n m = re.search(\"^\\s*#(ifdef|ifndef) (.*)\\s*$\", line)\n if m:\n sym = m.group(1)\n macro = m.group(2)\n\n if sym==\"ifdef\" or sym==\"ifndef\":\n if ( sym==\"ifdef\" and macros.get(macro)==None ) or ( sym==\"ifndef\" and macros.get(macro)!=None ):#If expression invalid\n for line2 in fp:#Skip to endif or else\n linecount += 1\n if re.search(\"^\\s*#endif\\s*$\", line2):\n break\n if re.search(\"^\\s*#else\\s*$\", line2):\n break\n continue\n # Macros->endif\n if re.search(\"^\\s*#endif\\s*$\", line):\n continue\n # Macros->Replace\n for macro in macros:\n if type(macros[macro])==str:\n line = line.replace(macro, macros[macro])\n # Imports\n m = re.search(\"^.*\\s*(import(\\((.*)\\))?\\s+).*?([a-zA-Z0-9_]+);.*$\", line)\n if m:\n line = line.replace(m.group(1), \"\")\n if m.group(3)==None:\n imports.append({'name':m.group(4), 'identifier':m.group(4), 'uniform':line.find(\"uniform\")>=0, 'file':shader_filename, 'line':linecount})\n else:\n imports.append({'name':m.group(3), 'identifier':m.group(4), 'uniform':line.find(\"uniform\")>=0, 'file':shader_filename, 'line':linecount})\n # Done\n shader_code += line+\"\\\\n\"\n if is_include==False and shader_type==-1:\n print(\"ERROR: \"+shader_filename+\" is not typed\")\n sys.exit(1)\n return {\"type\":shader_type, \"code\":shader_code, \"macros\":macros, \"imports\":imports}\n\ndef generateShaderMask(shader_variables, imports):\n mask = 0\n for i in range(len(shader_variables)):\n varname = shader_variables[i]\n for sv in imports:\n if sv[\"name\"]==varname:\n mask |= 1<<i\n return mask\n\ndef loadShaderfiles(shadervars, shader_folder=\"src/render/shaders/code/\"):\n shaderfiles = []\n shaderfiles_metadata = {}\n for f in os.listdir(shader_folder):\n if f[-2:]==\".c\":\n shaderfiles.append(f)\n meta = loadShader(shader_folder, f)\n meta['id'] = -1\n\n shadervar_mask = 0\n 
shadervar_overridesMask = 0\n shadervar_overrides = [None]*len(shadervars)\n shadervar_custom = []\n for theimport in meta[\"imports\"]:\n override = theimport[\"name\"]!=theimport[\"identifier\"]\n try:#Builtin\n varindex = shadervars.index(theimport[\"name\"])\n if override:\n print(theimport['file']+\":\"+str(theimport['line'])+\"\\t\\tOverride SHADERVAR \"+theimport[\"name\"]+\" with \"+theimport['identifier'], file=sys.stderr)\n shadervar_overridesMask |= (1<<varindex)\n shadervar_overrides[varindex] = theimport[\"identifier\"]\n else:\n shadervar_mask |= (1<<varindex)\n except ValueError:#Custom\n print(theimport['file']+\":\"+str(theimport['line'])+\"\\t\\tCustom SHADERVAR \"+theimport[\"name\"], file=sys.stderr)\n shadervar_custom.append(theimport[\"identifier\"])\n\n meta['shadervar_mask'] = shadervar_mask\n meta[\"shadervar_overridesMask\"] = shadervar_overridesMask\n meta['shadervar_overrides'] = shadervar_overrides\n meta['shadervar_custom'] = shadervar_custom\n\n shaderfiles_metadata[f] = meta\n return shaderfiles, shaderfiles_metadata\n\ndef combineOverrides(a, b):\n for i in range(len(a)):\n if b[i]!=None:\n if a[i]!=None:\n print(\"Override varaiables \"+a[i]+\" exist twice!\")\n sys.exit(1)\n a[i] = b[i]\n return a\n\n\ndef parseShaders(variables_file, variables_outfile, shaders_folder, shaders_file, shaders_outfile, shader_programs_out):\n shader_variables = loadVariableNames(variables_file, variables_outfile)\n shader_programs = loadShaderPrograms(shaders_file)\n shaders, shaderfiles_metadata = loadShaderfiles(shader_variables, shaders_folder)\n shaders_ordered = []\n shader_ids = {}\n shader_lastid = 0\n shader_header_buffer = \"\"\n shader_files_buffer = \"\"\n shader_programs_buffer = \"\"\n with open(shader_programs_out, \"w\") as fp:\n for shaderprogram_index in range(len(shader_programs)):\n shaderprogram = shader_programs[shaderprogram_index]\n print(\"#define SHADER_\"+shaderprogram[\"name\"]+\" \"+str(shaderprogram_index), file=fp)\n\n # ID the shader files\n shaderprogram_shadersline = \"\"\n shaderprogram_mask = 0\n shaderprogram_overrideMask = 0\n shaderprogram_overrides = [None]*len(shader_variables)\n shaderprogram_customsline = \"\"\n shaderprogram_customcount = 0\n\n # Collect the shaders into the program\n for shaderfile_filename in shaderprogram[\"shaders\"]:\n shaderfile_meta = shaderfiles_metadata[shaderfile_filename]\n if shaderfile_meta[\"id\"]==-1:#If new shaderfile\n shaderfile_meta[\"id\"] = shader_lastid\n shaders_ordered.append(shaderfile_filename)\n shader_files_buffer += \"shader_files[\"+str(shader_lastid)+\"] = ShaderProgram::compileShader(\\\"\"+shaderfile_filename+\"\\\", \"+str(shaderfile_meta['type'])+\", \\\"\"+shaderfile_meta[\"code\"]+\"\\\");\\n\"\n shader_lastid += 1\n shaderprogram_shadersline += \", shader_files[\"+str(shaderfile_meta[\"id\"])+\"]\"\n shaderprogram_mask |= shaderfile_meta[\"shadervar_mask\"]\n shaderprogram_overrideMask |= shaderfile_meta[\"shadervar_overridesMask\"]\n shaderprogram_overrides = combineOverrides(shaderprogram_overrides, shaderfile_meta[\"shadervar_overrides\"])\n for cv in shaderfile_meta[\"shadervar_custom\"]:\n shaderprogram_customsline += \", \\\"\"+cv+\"\\\"\"\n print(\"#define SHADER_\"+shaderprogram[\"name\"]+\"_\"+cv+\" \"+str(len(shader_variables)+shaderprogram_customcount), file=fp)\n shaderprogram_customcount += 1\n\n if len(shaderprogram_shadersline)>2:\n shaderprogram_shadersline = shaderprogram_shadersline[2:]\n if len(shaderprogram_customsline)>2:\n shaderprogram_customsline = 
shaderprogram_customsline[2:]\n shaderprogram_overridesline = \"\"\n for ovr in shaderprogram_overrides:\n if ovr!=None:\n shaderprogram_overridesline += \", \\\"\"+ovr+\"\\\"\"\n if len(shaderprogram_overridesline)>2:\n shaderprogram_overridesline = shaderprogram_overridesline[2:]\n\n # Write the shader program\n shader_programs_buffer += \"shader_program[\"+str(shaderprogram_index)+\"] = new ShaderProgram(\"+str(shaderprogram_index)+\", \\\"\"+shaderprogram[\"name\"]+\"\\\", {\"+shaderprogram_shadersline+\"}, \"+str(shaderprogram_mask)+\", \"+str(shaderprogram_overrideMask)+\", {\"+shaderprogram_overridesline+\"}, {\"+shaderprogram_customsline+\"});\\n\"\n\n # Write the header line\n shader_header_buffer = \"shader_files = new GLint[\"+str(len(shaders))+\"];\\nshader_program = (ShaderProgram **)malloc(sizeof(ShaderProgram *)*\"+str(len(shader_programs))+\");\\n\"\n\n with open(shaders_outfile, \"w\") as fp:\n print(shader_header_buffer, file=fp)\n print(shader_files_buffer, file=fp)\n print(shader_programs_buffer, file=fp)\n\nif __name__==\"__main__\":\n #SHADER_VARIABLES_IN = ../src/render/shaders/shader_variables.txt\n #SHADER_VARIABLES_OUT = ../src/render/shaders/ShaderVariables.h\n #SHADER_FOLDER = ../src/render/shaders/code/\n #SHADER_DESC = ../src/render/shaders/shaders.txt\n #SHADER_HEADER = ../src/render/shaders/ShaderPrograms.h\n #SHADER_LOADER = ../src/render/shaders/ShaderLoader.c\n variables_file = \"\"\n variables_outfile = \"\"\n shaders_folder = \"\"\n shaders_file = \"\"\n shaders_outfile = \"\"\n shader_programs_out = \"\"\n for i in range(len(sys.argv)):\n arg = sys.argv[i]\n if arg==\"--variables-in\":\n variables_file = sys.argv[i+1]\n elif arg==\"--variables-out\":\n variables_outfile = sys.argv[i+1]\n elif arg==\"--shaders-folder\":\n shaders_folder = sys.argv[i+1]\n elif arg==\"--shaders-desc\":\n shaders_file = sys.argv[i+1]\n elif arg==\"--shaders-header\":\n shader_programs_out = sys.argv[i+1]\n elif arg==\"--shaders-loader\":\n shaders_outfile = sys.argv[i+1]\n #parseShaders(sys.argv[2], sys.argv[4], sys.argv[6], sys.argv[8], sys.argv[10], sys.argv[12])\n parseShaders(variables_file, variables_outfile, shaders_folder, shaders_file, shaders_outfile, shader_programs_out)\n" }, { "alpha_fraction": 0.6498085260391235, "alphanum_fraction": 0.6713465452194214, "avg_line_length": 31.14358901977539, "blob_id": "2d0b56738555ae93be7090bef40970b40c0b8752", "content_id": "d1416af2b5031acd46d6f251d7bec486c1b4e1f0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6268, "license_type": "no_license", "max_line_length": 268, "num_lines": 195, "path": "/src/cpp/util/Boundaries.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"render/BasicShapes.hpp\"\n#include \"render/shaders/ShaderProgram.hpp\"\n#include <glm/gtc/matrix_transform.hpp>\n#include <glm/gtx/norm.hpp>\n\n#include \"Boundaries.hpp\"\n\nusing namespace render;\nusing namespace shaders;\nusing namespace util;\nusing namespace Boundaries;\n\nOBB::OBB(const glm::vec3 &min, const glm::vec3 &max)\n{\n\tthis->min = min;\n\tthis->max = max;\n}\nOBB::OBB(const float minX, const float minY, const float minZ, const float maxX, const float maxY, const float maxZ)\n{\n\tthis->min.x = minX;\n\tthis->min.y = minY;\n\tthis->min.z = minZ;\n\tthis->max.x = maxX;\n\tthis->max.y = maxY;\n\tthis->max.z = maxZ;\n}\n\nAABB::AABB()\n{\n\tAABB(0.f, 0.f, 0.f, 0.f, 0.f, 0.f);\n}\nAABB::AABB(const float boxCenterX, const float boxCenterY, const float 
boxCenterZ, const float boxHalfSizeX, const float boxHalfSizeY, const float boxHalfSizeZ)\n{\n\tthis->boxCenter[0] = boxCenterX;\n\tthis->boxCenter[1] = boxCenterY;\n\tthis->boxCenter[2] = boxCenterZ;\n\tthis->boxHalfSize[0] = boxHalfSizeX;\n\tthis->boxHalfSize[1] = boxHalfSizeY;\n\tthis->boxHalfSize[2] = boxHalfSizeZ;\n}\nAABB::AABB(const glm::vec3 &min, const glm::vec3 &max)\n{\n\tthis->boxCenter[0] = (min.x+max.x)/2.f;\n\tthis->boxCenter[1] = (min.y+max.y)/2.f;\n\tthis->boxCenter[2] = (min.z+max.z)/2.f;\n\tthis->boxHalfSize[0] = (max.x-min.x)/2.f;\n\tthis->boxHalfSize[1] = (max.y-min.y)/2.f;\n\tthis->boxHalfSize[2] = (max.z-min.z)/2.f;\n}\nAABB::~AABB()\n{\n\t\n}\nAABB AABB::translate(const glm::vec3 &offset)\n{\n\tAABB aabb(this->min()+offset, this->max()+offset);\n\treturn aabb;\n}\nOBB AABB::rotate(const glm::quat &rotation)\n{\n\tOBB obb(rotation*this->min(), rotation*this->max());\n\treturn obb;\n}\nbool AABB::checkInside(const glm::vec3 v)\n{\n\treturn\n\t\tthis->minX()<=v.x && this->maxX()>=v.x &&\n\t\tthis->minY()<=v.y && this->maxY()>=v.y &&\n\t\tthis->minZ()<=v.z && this->maxZ()>=v.z\n\t;\n}\nbool AABB::checkIntersect(const AABB &aabb)\n{\n\t// a.Min>b.Max || a.Max<b.Min ||\n\treturn !(\n\t\tthis->boxCenter[0]-this->boxHalfSize[0]>aabb.boxCenter[0]+aabb.boxHalfSize[0] || this->boxCenter[0]+this->boxHalfSize[0]<aabb.boxCenter[0]-aabb.boxHalfSize[0] ||\n\t\tthis->boxCenter[1]-this->boxHalfSize[1]>aabb.boxCenter[1]+aabb.boxHalfSize[1] || this->boxCenter[1]+this->boxHalfSize[1]<aabb.boxCenter[1]-aabb.boxHalfSize[1] ||\n\t\tthis->boxCenter[2]-this->boxHalfSize[2]>aabb.boxCenter[2]+aabb.boxHalfSize[2] || this->boxCenter[2]+this->boxHalfSize[2]<aabb.boxCenter[2]-aabb.boxHalfSize[2]\n\t);\n}\nbool AABB::checkIntersect(const Sphere &rbb)\n{\n\treturn glm::length2(glm::max(glm::min(this->max(), rbb.center), this->min())-rbb.center) < rbb.radius*rbb.radius;\n}\nvoid AABB::render(RenderManager &rManager, glm::vec4 color, bool solid)\n{\n\trManager.pushMatrixM();\n\trManager.M = glm::scale(glm::translate(rManager.M, glm::vec3(this->boxCenter[0]-this->boxHalfSize[0], this->boxCenter[1]-this->boxHalfSize[1], this->boxCenter[2]-this->boxHalfSize[2])), glm::vec3(this->boxHalfSize[0]*2,this->boxHalfSize[1]*2,this->boxHalfSize[2]*2));\n\trManager.markMDirty();\n\tif(color.a!=1.0f)\n\t\trManager.enableTransparency();\n\tShaderProgram *sp = rManager.useShader(SHADER_solidColor);\n\tglUniform4f(sp->getShaderLocation(true, SHADER_solidColor_solidColor), color.x, color.y, color.z, color.a);\n\tif(solid)\n\t\tBasicShapes::renderUnitCube(sp->getShaderLocation(false, SHADERVAR_vertex_position));\n\telse\n\t\tBasicShapes::renderUnitCubeFrame(sp->getShaderLocation(false, SHADERVAR_vertex_position));\n\tif(color.a!=1.0f)\n\t\trManager.disableTransparency();\n\trManager.popMatrixM();\n}\nfloat AABB::rayCastDistance(Raycast &raycast)\n{\n\tfloat md = raycast.maxDistance==0 ? 
std::numeric_limits<float>::max()-1 : raycast.maxDistance;\n\tfloat t[2][3];\n\tt[0][0] = (this->minX()-raycast.origin.x)/raycast.direction.x;\n\tt[0][1] = (this->minY()-raycast.origin.y)/raycast.direction.y;\n\tt[0][2] = (this->minZ()-raycast.origin.z)/raycast.direction.z;\n\t\n\tt[1][0] = (this->maxX()-raycast.origin.x)/raycast.direction.x;\n\tt[1][1] = (this->maxY()-raycast.origin.y)/raycast.direction.y;\n\tt[1][2] = (this->maxZ()-raycast.origin.z)/raycast.direction.z;\n\t\n\tfloat tmax = std::numeric_limits<float>::min();\n\tfloat tmin = std::numeric_limits<float>::max();\n\tfloat tmp;\n\tfor(int i=0;i<3;i++)\n\t{\n\t\tfloat &ntMin = t[0][i];\n\t\tfloat &ntMax = t[1][i];\n\t\tif(ntMin>ntMax)\n\t\t{\n\t\t\ttmp = ntMin;\n\t\t\tntMin = ntMax;\n\t\t\tntMax = tmp;\n\t\t}\n\t\tif(ntMax < tmax || ntMin > tmin)\n\t\t\treturn -1;\n\t\tif(ntMin > tmax)\n\t\t\ttmax = ntMin;\n\t\tif(ntMax < tmin)\n\t\t\ttmin = ntMax;\n\t}\n\tif(tmax > tmin || tmax >= md)\n\t\treturn -1;\n\treturn tmax;\n}\nbool AABB::rayCastCheck(Raycast &raycast)\n{\n\treturn this->rayCastDistance(raycast)!=-1;\n}\nRaycastResult *AABB::rayCast(Raycast &raycast)\n{\n\tfloat md = raycast.maxDistance==0 ? std::numeric_limits<float>::max() : raycast.maxDistance;\n\tfloat t[2][3];\n\tt[0][0] = (this->minX()-raycast.origin.x)/raycast.direction.x;\n\tt[0][1] = (this->minY()-raycast.origin.y)/raycast.direction.y;\n\tt[0][2] = (this->minZ()-raycast.origin.z)/raycast.direction.z;\n\t\n\tt[1][0] = (this->maxX()-raycast.origin.x)/raycast.direction.x;\n\tt[1][1] = (this->maxY()-raycast.origin.y)/raycast.direction.y;\n\tt[1][2] = (this->maxZ()-raycast.origin.z)/raycast.direction.z;\n\t\n\tfloat tmax = std::numeric_limits<float>::min();\n\tfloat tmin = std::numeric_limits<float>::max();\n\tfloat tmp;\n\tfor(int i=0;i<3;i++)\n\t{\n\t\tfloat &ntMin = t[0][i];\n\t\tfloat &ntMax = t[1][i];\n\t\tif(ntMin>ntMax)\n\t\t{\n\t\t\ttmp = ntMin;\n\t\t\tntMin = ntMax;\n\t\t\tntMax = tmp;\n\t\t}\n\t\tif(ntMax < tmax || ntMin > tmin)\n\t\t\treturn 0;\n\t\tif(ntMin > tmax)\n\t\t\ttmax = ntMin;\n\t\tif(ntMax < tmin)\n\t\t\ttmin = ntMax;\n\t}\n\tif(tmax > tmin || tmax >= md)\n\t\treturn 0;\n\tRaycastResult *result = new RaycastResult;\n\tresult->distance = tmax;\n\tresult->hit = true;\n\t//result->rayHit = glm::normalize(raycast.direction)*tmax;\n\treturn result;\n}\nglm::vec3 AABB::min()\n{\n\treturn glm::vec3(this->minX(), this->minY(), this->minZ());\n}\nglm::vec3 AABB::max()\n{\n\treturn glm::vec3(this->maxX(), this->maxY(), this->maxZ());\n}\nfloat AABB::minX() { return this->boxCenter[0]-this->boxHalfSize[0]; }\nfloat AABB::minY() { return this->boxCenter[1]-this->boxHalfSize[1]; }\nfloat AABB::minZ() { return this->boxCenter[2]-this->boxHalfSize[2]; }\nfloat AABB::maxX() { return this->boxCenter[0]+this->boxHalfSize[0]; }\nfloat AABB::maxY() { return this->boxCenter[1]+this->boxHalfSize[1]; }\nfloat AABB::maxZ() { return this->boxCenter[2]+this->boxHalfSize[2]; }\n" }, { "alpha_fraction": 0.7144973278045654, "alphanum_fraction": 0.7270875573158264, "avg_line_length": 32.339508056640625, "blob_id": "2eb243a1f9d8ca20f6877ac1d33173322d5062b3", "content_id": "b84c75e2a2a91b8cdf81e56ffa28d73ff1f2b4c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5401, "license_type": "no_license", "max_line_length": 214, "num_lines": 162, "path": "/src/cpp/world/entities/SecurityCamera.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <cmath>\n#include <iostream>\n#include 
<glm/gtx/quaternion.hpp>\n#include <glm/gtc/matrix_transform.hpp>\n#include <glm/gtx/vector_angle.hpp>\n\n#include \"util/gl.h\"\n\n#include \"render/SkeletalAnimation.hpp\"\n#include \"render/BasicShapes.hpp\"\n#include \"render/DDSImage.hpp\"\n\n#include \"world/entities/Enemy.hpp\"\n\n#include \"res/md5/security_camera.md5mesh.h\"\n\n#include \"SecurityCamera.hpp\"\n\nusing namespace world;\nusing namespace entities;\nusing namespace render;\nusing namespace ai;\n\nSecurityCamera::SecurityCamera() : super()\n{\n\tthis->model = (SkeletalModel *)util::AssetManager::getAssetManager()->getAsset(ASSET_SECURITY_CAMERA_MD5MESH);\n\tthis->perception = new PerceptionManager(this, {&typeid(Enemy)}, this->model->bindPoseSkeleton[ASSET_SECURITY_CAMERA_MD5MESH_JOINT_CAMERASTALK].pos);\n\tthis->perception->setYawBounds(-M_PI/2, M_PI/2);\n\tthis->initAnimation = (SkeletalAnimation *)util::AssetManager::getAssetManager()->getAsset(ASSET_SECURITY_CAMERA_MD5ANIM);\n\tthis->warningImage = (render::DDSImage *)util::AssetManager::getAssetManager()->getAsset(ASSET_ATTENTION_PNG);\n\tthis->animationDuration = this->initAnimation->getAnimationDuration();\n\tthis->animationCurrent = 0;\n\tthis->initAnimating = true;\n\tthis->selector = false;\n\tthis->bounds = &this->initAnimation->getFrameBounds(-1);\n\n//\t// Setup billboard\n//\tglGenVertexArrays(1, &this->billboardVertexArrayId);\n//\tglBindVertexArray(this->billboardVertexArrayId);\n//\tglGenBuffers(1, &this->billboardVertexPositionBufferID);\n//\tglBindBuffer(GL_ARRAY_BUFFER, this->billboardVertexPositionBufferID);\n//\tstatic const GLfloat g_vertex_buffer_data[] = {\n//\t\t1.f, 1.f,\n//\t\t-1.f, 1.f,\n//\t\t-1.f, -1.f,\n//\t\t1.f, -1.f,\n//\t};\n//\tglBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);\n}\nSecurityCamera::~SecurityCamera()\n{\n\t\n}\n\n#define TRACKING_SPEED 10\n\nvoid SecurityCamera::tick(util::DeltaTime &deltaTime)\n{\n\tif(this->selector)\n\t\treturn;\n\tif(this->initAnimating)\n\t{\n\t\tif(deltaTime.getTime()-this->getSpawnTime()<=this->animationDuration)\n\t\t{\n\t\t\tthis->animationCurrent = deltaTime.getTime()-this->getSpawnTime();\n\t\t\tthis->bounds = &this->initAnimation->getFrameBounds(this->initAnimation->getFrame(this->animationCurrent));\n\t\t}\n\t\telse\n\t\t{\n\t\t\tthis->initAnimating = false;\n\t\t\tthis->bounds = &this->initAnimation->getFrameBounds(-1);\n\t\t}\n\t}\n\telse\n\t{\n\t\tthis->perception->tick(deltaTime);\n\t\tif(this->perception->getTargetEntity()!=0)\n\t\t{\n\t\t\tthis->warningStop = deltaTime.getTime()+3;\n\t\t\tthis->warning = true;\n\t\t}\n\t\telse\n\t\t{\n\t\t\tthis->warning = deltaTime.getTime()<this->warningStop;\n\t\t}\n\t}\n}\nvoid SecurityCamera::render(RenderManager &rManager)\n{\n\t//this->setOrientation(this->getOrientation()*glm::quat(glm::vec3(0, 0, glm::radians((float)(std::rand()%100)/10))));\n\t\n\t\n\trManager.pushMatrixM();\n\trManager.M = glm::translate(rManager.M, this->getPosition())*glm::toMat4(this->getOrientation());\n\trManager.markMDirty();\n\trManager.useShader(SHADER_fuzzyModel);\n\t\n\t/*Skeleton skel = this->model->bindPoseSkeleton;\n\tthis->model->render(rManager, skel);*/\n\t//std::cout << \"R time=\" << this->animTime << std::endl;\n\t//this->initAnimation->render(rManager, *this->model, this->animTime);\n\t//drone->renderSkeleton(rManager, skel);\n\t//drone->renderWeights(rManager, skel);\n\tif(this->selector)\n\t{\n\t\tthis->model->render(rManager, this->initAnimation->getSkeleton(0));\n\t}\n\telse 
if(this->initAnimating)\n\t{\n\t\tthis->model->render(rManager, this->initAnimation->getSkeleton(this->animationCurrent));\n\t\t//this->initAnimation->renderBounds(rManager, this->animationCurrent);\n\t}\n\telse\n\t{\n\t\tSkeleton skel = this->model->bindPoseSkeleton;\n\t\tskel[ASSET_SECURITY_CAMERA_MD5MESH_JOINT_CAMERASTALK].ori = this->perception->getOrientation();\n\t\tthis->model->render(rManager, skel);\n\t\t//this->initAnimation->renderBounds(rManager, this->animationCurrent);\n\t\t\n\t\t\n\t\t// Render billboard icon\n\t\tif(this->warning)\n\t\t{\n\t\t\trManager.disableDepth();\n\t\t\trManager.enableTransparency();\n\t\t\t\n\t\t\trender::shaders::ShaderProgram *prog = rManager.useShader(SHADER_billboard);\n\t\t\tglm::vec3 billpoc = this->getPosition()+this->getOrientation()*(skel[ASSET_SECURITY_CAMERA_MD5MESH_JOINT_MOTIONSENSOR].pos+skel[ASSET_SECURITY_CAMERA_MD5MESH_JOINT_MOTIONSENSOR].ori*glm::vec3(0.f, 0.04f, 0.0f));\n\t\t\tglUniform3fv(prog->getShaderLocation(true, SHADER_billboard_billboard_center), 1, &billpoc[0]);\n\n\t\t\tglActiveTexture(GL_TEXTURE0);\n\t\t\tthis->warningImage->bindTexture();\n\t\t\tglUniform1i(prog->getShaderLocation(true, SHADERVAR_material_map_Kd), 0);\n\n\t\t\tfloat sizeMM = 10.f;\n\t\t\tglm::vec2 scaler = glm::vec2(sizeMM/rManager.getWidthMM(), sizeMM/rManager.getHeightMM());\n\t\t\tglUniform2fv(prog->getShaderLocation(true, SHADER_billboard_screen_scaler), 1, &scaler[0]);\n\n\t\t\trManager.disableCullFace();\n\n\t\t\trender::BasicShapes::renderUnitSquare(prog->getShaderLocation(false, SHADERVAR_vertex_position));\n\t\t\trManager.enableCullFace();\n\n\t\t\t//glBindVertexArray(this->billboardVertexArrayId);\n\t\t\t//glBindBuffer(GL_ARRAY_BUFFER, this->billboardVertexPositionBufferID);\n\t\t\t//prog->setVertexAttributePointer(SHADERVAR_vertex_position, 2, GL_FLOAT, GL_FALSE, 0, 0);\n\t\t\t//glDrawArrays(GL_QUADS, 0, 4);\n\n\t\t\trManager.disableTransparency();\n\t\t\trManager.enableDepth();\n\t\t}\n\t}\n\t//this->getBounds()->render(rManager, glm::vec4(1.f, 1.f, 0.f, 1.f), false);\n\trManager.popMatrixM();\n\n}\n\n//controllable by player\n//\tupdate quat\n//controllable by scripts\n//\tlookAt(vector)\n//\tkeepLookingAt(Entity*)\n" }, { "alpha_fraction": 0.7076813578605652, "alphanum_fraction": 0.7105262875556946, "avg_line_length": 19.676469802856445, "blob_id": "b3818fcbcc26d06f44cddefa3bee200b22830353", "content_id": "51dc4b958c239d9aeca3566b5454a7506045ce8b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1406, "license_type": "no_license", "max_line_length": 71, "num_lines": 68, "path": "/src/cpp/render/MaterialLibrary.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_RENDER_MATERIALLIBRARY_HPP_\n#define FYP_RENDER_MATERIALLIBRARY_HPP_\n\nnamespace render {\n\tclass MaterialLibrary;\n\ttypedef struct Material Material;\n\tnamespace shaders {\n\t\tclass ShaderProgram;\n\t}\n\tstruct MaterialAsset {\n\t\tint assetId;\n\t\tint materialId;\n\t};\n}\n\n#include \"render/MaterialData.h\"\n#include \"render/shaders/ShaderProgram.hpp\"\n#include \"util/AssetManager.hpp\"\n#include <vector>\n#include <ostream>\n\nstd::ostream &operator<<(std::ostream &ost, const render::Material &m);\n\nnamespace render {\n\n\n\ttypedef struct MaterialAsset MaterialAsset;\n\n\tstruct Material {\n\t\tstd::string name;\n\t\tint flags;\n\n\t\tfloat Ka[3];\n\t\tfloat Kd[3];\n\t\tfloat Ks[3];\n\t\tfloat Tf[3];\n\t\tfloat d;\n\t\tfloat Ns;\n\t\tfloat Ni;\n\t\tint illum;\n\t\tint 
sharpness;\n\t\tint map_Ka;\n\t\tint map_Kd;\n\t\tint map_Ks;\n\t\tint map_Ns;\n\t\tint map_d;\n\t\tint disp;\n\t\tint decal;\n\t\tint bump;\n\t};\n\n\tclass MaterialLibrary : public util::Asset {\n\t\tpublic:\n\t\t\tMaterialLibrary(int assetId, std::istream &fp);\n\t\t\t~MaterialLibrary();\n\t\t\tvirtual void write(std::ostream &ost) const;\n\t\t\tvirtual void postload();\n\t\t\tvoid printMaterial(std::ostream &ost);\n\t\t\tMaterial *getMaterial(int materialId);\n\t\t\tvoid updateShader(shaders::ShaderProgram *shader, int materialId);\n\t\tprivate:\n\t\t\tstd::vector<Material> materials;\n\t};\n}\nbool operator==(render::MaterialAsset &a, render::MaterialAsset &b);\nbool operator!=(render::MaterialAsset &a, render::MaterialAsset &b);\n\n#endif\n" }, { "alpha_fraction": 0.6921659111976624, "alphanum_fraction": 0.6921659111976624, "avg_line_length": 20.27450942993164, "blob_id": "fb7e3bcf164cc29c3481bc72957be723110e5ded", "content_id": "457a229d102371ef66227ed0b3713c5ff40706a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1085, "license_type": "no_license", "max_line_length": 85, "num_lines": 51, "path": "/src/cpp/gui/Screen.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_SCREEN_HPP_\n#define FYP_GUI_SCREEN_HPP_\n\n#include <list>\n#include \"../util/DeltaTime.hpp\"\n\n#include \"Element.hpp\"\n\nclass ScreenManager;\n\nclass Screen {\n\tprotected:\n\t\tElement *selectedElement;\n\t\tstd::list<Element *> elements;\n\t\tbool elementSelectedWithMouse;\n\t\tElement *firstNext,*firstPrev,*firstLeft,*firstRight,*firstUp,*firstDown;\n\tpublic:\n\t\tScreenManager *manager;\n\t\t\n\t\tScreen();\n\t\tvirtual ~Screen();\n\n\t\t// Misc\n\t\t/**\n\t\t * Adds an element to the screen\n\t\t */\n\t\tvoid addElement(Element *e);\n\t\t/**\n\t\t * Renders the GUI screen\n\t\t */\n\t\tvirtual void render(util::DeltaTime &deltaTime, render::RenderManager &manager);\n\t\tvoid selectElement(Element *element, bool mouseSelection);\n\n\t\t// Events\n\t\t/**\n\t\t * On control boolean update\n\t\t */\n\t\tvirtual bool onControlEvent(int control, int action);\n\t\t/**\n\t\t * On control delta update\n\t\t */\n\t\tvirtual bool onControlEvent(int control, double x, double y, double dx, double dy);\n\t\t/**\n\t\t * Called by the screen manager whenever the screen resizes\n\t\t */\n\t\tvirtual void onScreenResize();\n\n\t\tvirtual bool supportsCursor();\n};\n\n#endif\n" }, { "alpha_fraction": 0.6751188635826111, "alphanum_fraction": 0.7226624488830566, "avg_line_length": 18.71875, "blob_id": "74c957f73b643e57591bc200ba729d2c59d7f7e5", "content_id": "e11278ff3616fbbcaf2216270760c0b87eb0e3c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 631, "license_type": "no_license", "max_line_length": 47, "num_lines": 32, "path": "/src/cpp/render/DDSImage.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_RENDER_DDSIMAGE_HPP_\n#define FYP_RENDER_DDSIMAGE_HPP_\n\n#define DWFOURCC_DXT1 0x31545844\n#define DWFOURCC_DXT3 0x33545844\n#define DWFOURCC_DXT5 0x35545844\n\nnamespace render {\n\tclass DDSImage;\n}\n\n#include \"util/gl.h\"\n#include \"util/AssetManager.hpp\"\n\nnamespace render {\n\tclass DDSImage : public util::Asset {\n\t\tpublic:\n\t\t\tDDSImage(int assetId, std::istream &fp);\n\t\t\tvirtual ~DDSImage();\n\t\t\tvirtual void write(std::ostream &ost) const;\n\t\t\tvirtual void postload();\n\t\t\tvoid 
bindTexture();\n\t\tprivate:\n\t\t\tbool imagePushed;\n\t\t\tint imageDataSize;\n\t\t\tunsigned char *imageData;\n\t\t\tGLuint textureID;\n\t\t\tGLint format;\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.7082657814025879, "alphanum_fraction": 0.7163695096969604, "avg_line_length": 33.22222137451172, "blob_id": "f0dd439941df00bc8dda878fac69806212442c67", "content_id": "bed2519500997d94d30f4fba1aa854cca674e152", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 617, "license_type": "no_license", "max_line_length": 90, "num_lines": 18, "path": "/hooks/asset_convert_image.py", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport sys,subprocess\n\nfrom wand.image import Image\nfrom asset_common import writeType\n\n#cmd=\"convert -format dds -define dds:mipmaps=5 -define dds:compression=dxt1\"\ncmd=\"convert -format dds -define dds:compression=dxt5\"\n\ndef convertImage(infile_path, outfile_fp):\n p = subprocess.Popen(cmd+\" \"+infile_path+\" dds:-\", shell=True, stdout=subprocess.PIPE)\n img_blob = p.stdout.read() #[4:]\n dataToWrite = img_blob\n writeType(outfile_fp, [len(dataToWrite)])\n amountWritten = outfile_fp.write(dataToWrite)\n #print(\"\\t\"+str(amountWritten), \"bytes written\")\n outfile_fp.flush()\n\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6928104758262634, "avg_line_length": 18.125, "blob_id": "d02db47f310739773218759d46a7810fa80439a7", "content_id": "f1b2561c9cdee63b3de6990b8f7af9bde75c270f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 459, "license_type": "no_license", "max_line_length": 60, "num_lines": 24, "path": "/src/cpp/world/entities/Player.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/Boundaries.hpp\"\n#include \"world/World.hpp\"\n\n#include \"Player.hpp\"\n\n\nusing namespace world;\nusing namespace entities;\nusing namespace util::Boundaries;\n\nPlayer::Player() : super()\n{\n\tthis->doRender = false;\n\tthis->bounds = new AABB(0.f, 0.f, 0.f, 0.4f, 0.4f, 0.4f);\n\tthis->setBulletHealth(1.f, 2);\n}\nPlayer::~Player()\n{\n\t\n}\nvoid Player::die(double time, glm::vec3 direction, int type)\n{\n\tthis->getWorld().gameOver(GAME_OVER_PLAYER_DESTROYED);\n}\n" }, { "alpha_fraction": 0.7266666889190674, "alphanum_fraction": 0.7266666889190674, "avg_line_length": 14, "blob_id": "24c2a3e85fa2a2a4457864bd548b1f48fcb2d295", "content_id": "ab25eb826bf022fc05901e0b3e9f9a1f513a1f19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 150, "license_type": "no_license", "max_line_length": 33, "num_lines": 10, "path": "/src/cpp/ai/ActionPlanner.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_ACTIONPLANNER_HPP_\n#define FYP_AI_ACTIONPLANNER_HPP_\n\nclass ActionPlanner {\n\tpublic:\n\t\tActionPlanner();\n\t\t~ActionPlanner();\n};\n\n#endif\n" }, { "alpha_fraction": 0.7568627595901489, "alphanum_fraction": 0.7568627595901489, "avg_line_length": 18.615385055541992, "blob_id": "691122c9070e1daef3bc4b49aceaf3e16b3d761f", "content_id": "8734a63c22dfc19f473cebbfad52be8835f61d22", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 255, "license_type": "no_license", "max_line_length": 61, "num_lines": 13, "path": "/src/cpp/util/AssetUtils.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": 
"#ifndef FYP_UTIL_ASSETUTILS_HPP_\n#define FYP_UTIL_ASSETUTILS_HPP_\n\n#include \"render/MaterialLibrary.hpp\"\n\nnamespace util {\n\tnamespace AssetUtils {\n\t\trender::Material *getMaterial(int assetId, int materialId);\n\t\tvoid bindTexture(int assetId);\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.6929008960723877, "alphanum_fraction": 0.7074794173240662, "avg_line_length": 32.928314208984375, "blob_id": "243e062e58ccbd150ecc530a2ce354cdbc04479a", "content_id": "94aecf687a28de61fab7440c4bcf2be7f01c7c92", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 9466, "license_type": "no_license", "max_line_length": 199, "num_lines": 279, "path": "/src/cpp/render/SkeletalModel.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include \"util/StreamUtils.hpp\"\n#include \"util/QuaternionUtils.hpp\"\n\n//#ifdef ENABLE_DEBUG_RENDER_MD5JOINT\n#include \"render/BasicShapes.hpp\"\n#include \"render/shaders/ShaderUtils.hpp\"\n#include \"util/gl.h\"\n#include <glm/gtc/matrix_transform.hpp>\n#include \"util/AssetManager.hpp\"\n#include \"render/MaterialLibrary.hpp\"\n#include \"render/DDSImage.hpp\"\n//#endif\n\n#include \"SkeletalModel.hpp\"\n\nusing namespace util;\nusing namespace util::StreamUtils;\nusing namespace render;\n\nSkeletalModel::SkeletalModel(int assetId, std::istream &fp) : Asset(assetId)\n{\n\tthis->setName(readString(fp));\n\t//load joints and meshes\n\tint numJoints = readInt(fp);\n\tfor(int j=0;j<numJoints;j++)\n\t{\n\t\tMD5Joint joint;\n\t\tMD5Bone bone;\n\t\t// Read the joint data\n\t\tjoint.index = j;\n\t\tjoint.name = readString(fp);\n\t\tjoint.parent = readInt(fp);\n\t\tbone.pos.x = readFloat(fp);\n\t\tbone.pos.y = readFloat(fp);\n\t\tbone.pos.z = readFloat(fp);\n\t\tbone.ori.x = readFloat(fp);\n\t\tbone.ori.y = readFloat(fp);\n\t\tbone.ori.z = readFloat(fp);\n\t\tQuaternionUtils::calculateQuaternionW(bone.ori);\n\t\tjoints.push_back(joint);\n\t\tbindPoseSkeleton.push_back(bone);\n\t}\n\tint numMeshes = readInt(fp);\n\t//meshes.append((shader_name, numverts, verts, numtris, tris, numweights, weights))\n\tfor(int i=0;i<numMeshes;i++)\n\t{\n\t\tMD5Mesh mesh;\n\t\tmesh.mtlAssetId = readInt(fp);\n\t\tmesh.materialId = readInt(fp);\n\t\tint numverts = readInt(fp);\n\t\tmesh.indecies.clear();\n\t\t//verts.append((vertIndex, tex, startWeight, countWeight))\n\t\tfor(int i=0;i<numverts;i++)\n\t\t{\n\t\t\tMD5Vertex vertex;\n\t\t\tvertex.index = readInt(fp);\n\t\t\tmesh.textureUVs.push_back(readFloat(fp));\n\t\t\tmesh.textureUVs.push_back(readFloat(fp));\n\t\t\tvertex.startWeight = readInt(fp);\n\t\t\tvertex.countWeight = readInt(fp);\n\t\t\tmesh.verts.push_back(vertex);\n\t\t}\n\t\tint numtris = readInt(fp);\n\t\t//tris.append((triIndex, vertIndex0, vertIndex1, vertIndex2))\n\t\tfor(int i=0;i<numtris;i++)\n\t\t{\n\t\t\treadInt(fp);\n\t\t\tmesh.indecies.push_back(readInt(fp));\n\t\t\tmesh.indecies.push_back(readInt(fp));\n\t\t\tmesh.indecies.push_back(readInt(fp));\n\t\t}\n\t\tint numweights = readInt(fp);\n\t\t//weights.append((weightIndex, joint, bias, pos))\n\t\tfor(int i=0;i<numweights;i++)\n\t\t{\n\t\t\tMD5Weight weight;\n\t\t\tweight.index = readInt(fp);\n\t\t\tweight.joint = readInt(fp);\n\t\t\tweight.bias = readFloat(fp);\n\t\t\tweight.pos.x = readFloat(fp);\n\t\t\tweight.pos.y = readFloat(fp);\n\t\t\tweight.pos.z = readFloat(fp);\n\t\t\tmesh.weights.push_back(weight);\n\t\t}\n\t\tmeshes.push_back(mesh);\n\t}\n\tthis->vertexArrayID = 0;\n\tfor(auto &mesh : 
this->meshes)\n\t{\n\t\tmesh.vertexBufferID = 0;\n\t\tmesh.vertexTextureBufferID = 0;\n\t\tmesh.vertexColorBufferID = 0;\n\t\tmesh.indexBufferID = 0;\n\t}\n}\nSkeletalModel::~SkeletalModel()\n{\n\t\n}\nvoid SkeletalModel::render()\n{\n\t//Select shader\n\t//FOR EACH MESH\n\t\t//Calculate vertex positions\n\t\t//Update vertex buffers\n\t\t\n\t\t//Push shader variables\n\t\t//draw\n}\nvoid SkeletalModel::write(std::ostream &ost) const\n{\n\tost << \"[\" << this->getAssetID() << \":\" << this->getName() << \".md5mesh]\";\n\tfor(const render::MD5Joint &joint : this->joints)\n\t{\n\t\tost << joint;\n\t}\n}\nvoid SkeletalModel::postload()\n{\n\tglGenVertexArrays(1, &this->vertexArrayID);\n\tglBindVertexArray(this->vertexArrayID);\n\tfor(render::MD5Mesh &mesh : this->meshes)\n\t{\n\t\tglGenBuffers(1, &mesh.vertexBufferID);\n\t\tglGenBuffers(1, &mesh.vertexTextureBufferID);\n\t\tglGenBuffers(1, &mesh.indexBufferID);\n\t\tglGenBuffers(1, &mesh.vertexColorBufferID);\n\n\t\tglBindBuffer(GL_ARRAY_BUFFER, mesh.vertexTextureBufferID);\n\t\tglBufferData(GL_ARRAY_BUFFER, mesh.textureUVs.size()*sizeof(GLfloat), &mesh.textureUVs[0], GL_STATIC_DRAW);\n\n\t\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mesh.indexBufferID);\n\t\tglBufferData(GL_ELEMENT_ARRAY_BUFFER, mesh.indecies.size()*sizeof(GLuint), &mesh.indecies[0], GL_STATIC_DRAW);\n\n\t\tGLfloat *vertexColorData = new GLfloat[mesh.verts.size()*3*sizeof(GLfloat)];\n\t\tfor(int v=0;v<(int)mesh.verts.size();v++)\n\t\t{\n\t\t\tvertexColorData[v*3+0] = (float)(std::rand()%1000)/1000;\n\t\t\tvertexColorData[v*3+1] = (float)(std::rand()%1000)/1000;\n\t\t\tvertexColorData[v*3+2] = (float)(std::rand()%1000)/1000;\n\t\t}\n\n\t\tglBindBuffer(GL_ARRAY_BUFFER, mesh.vertexColorBufferID);\n\t\tglBufferData(GL_ARRAY_BUFFER, mesh.verts.size()*3*sizeof(GLfloat), vertexColorData, GL_STATIC_DRAW);\n\t\tdelete [] vertexColorData;\n\t}\n}\n//#ifdef ENABLE_DEBUG_RENDER_MD5JOINT\nvoid SkeletalModel::renderSkeleton(render::RenderManager &rManager, const Skeleton &skeleton)\n{\n\trManager.disableDepth();\n\trManager.useShader(SHADER_solidColor);\n\tshaders::ShaderProgram *shader = shaders::ShaderProgram::getShader(SHADER_solidColor);\n\tGLint loc = shader->getShaderLocation(true, SHADER_solidColor_solidColor);\n\tGLint vploc = shader->getShaderLocation(false, SHADERVAR_vertex_position);\n\tfloat f = 0;\n\tfor(MD5Joint &joint : this->joints)\n\t{\n\t\tif(joint.parent>=0)\n\t\t{\n\t\t\tglUniform4f(loc, 1.f, 0.0f, 0.0f, 1.0f);\n\t\t\tBasicShapes::drawLine(skeleton[joint.index].pos, skeleton[joint.parent].pos, vploc);\n\t\t}\n\t\tf += 1/(float)this->joints.size();\n\t}\n\trManager.enableDepth();\n}\nvoid SkeletalModel::renderWeights(render::RenderManager &rManager, const Skeleton &skeleton)\n{\n\trManager.disableDepth();\n\tfloat f = 0;\n\trManager.useShader(SHADER_solidColor);\n\tshaders::ShaderProgram *shader = shaders::ShaderProgram::getShader(SHADER_solidColor);\n\tGLint loc = shader->getShaderLocation(true, SHADER_solidColor_solidColor);\n\tGLint vploc = shader->getShaderLocation(false, SHADERVAR_vertex_position);\n\tfor(MD5Mesh &mesh : this->meshes)\n\t{\n\t\tfor(MD5Weight &weight : mesh.weights)\n\t\t{\n\t\t\t// Weight to Joint\n\t\t\tMD5Joint &joint = this->joints[weight.joint];\n\t\t\tglUniform4f(loc, 0.f, 1.0f, 0.f, 1.0f);\n\n\t\t\tBasicShapes::drawLine(skeleton[joint.index].pos, skeleton[joint.index].pos+skeleton[joint.index].ori*weight.pos, vploc);\n\t\t}\n\t\tf+=1/(float)this->meshes.size();\n\t}\n\trManager.enableDepth();\n}\n//#endif\nvoid 
SkeletalModel::render(render::RenderManager &rManager)\n{\n\tthis->render(rManager, this->bindPoseSkeleton);\n}\nvoid SkeletalModel::render(render::RenderManager &rManager, const Skeleton &skeleton)\n{\n\tglBindVertexArray(this->vertexArrayID);\n\trManager.useShader(SHADER_fuzzyModel);\n\t//shaders::ShaderProgram *shader = rManager.useShader(SHADER_fuzzyModel);\n\t//GLint loc = shader->getShaderLocation(true, SHADER_solidColor_solidColor);\n\t//glUniform4f(loc, 0.f, 0.0f, 1.f, 1.0f);\n\tfor(MD5Mesh &mesh : this->meshes) {\n\t\tthis->render(rManager, skeleton, mesh, 0);\n\t}\n}\nvoid SkeletalModel::render(render::RenderManager &rManager, const Skeleton &skeleton, MD5Mesh &mesh, const Material *material)\n{\n\tGLfloat *vertexPositionData = new GLfloat[mesh.verts.size()*3*sizeof(GLfloat)];\n\tfor(int v=0;v<(int)mesh.verts.size();v++)\n\t{\n\t\tMD5Vertex &vertex = mesh.verts[v];\n\t\tglm::vec3 vect;\n\t\tvect.x = 0;\n\t\tvect.y = 0;\n\t\tvect.z = 0;\n\t\tfor(int w=0;w<vertex.countWeight;w++)\n\t\t{\n\t\t\tMD5Weight &weight = mesh.weights[vertex.startWeight+w];\n\t\t\tconst MD5Bone &bone = skeleton[weight.joint];\n\t\t\tvect += ( bone.pos + bone.ori*weight.pos)*weight.bias;\n\t\t}\n\t\tvertexPositionData[v*3+0] = vect.x;\n\t\tvertexPositionData[v*3+1] = vect.y;\n\t\tvertexPositionData[v*3+2] = vect.z;\n\t}\n\t// vertex position\n\tshaders::ShaderProgram *shader = shaders::ShaderProgram::getShader(SHADER_fuzzyModel);\n\n\tglBindBuffer(GL_ARRAY_BUFFER, mesh.vertexBufferID);\n\tglBufferData(GL_ARRAY_BUFFER, mesh.verts.size()*3*sizeof(GLfloat), vertexPositionData, GL_DYNAMIC_DRAW);\n\tshader->setVertexAttributePointer(SHADERVAR_vertex_position, 3, GL_FLOAT, GL_FALSE, 0, 0);\n\n\t//glBindBuffer(GL_ARRAY_BUFFER, mesh.normalBufferID);\n\t//glBufferData(GL_ARRAY_BUFFER, mesh.verts.size()*3*sizeof(GLfloat), normalVertexData, GL_DYNAMIC_DRAW);\n\t//shader->setVertexAttributePointer(SHADERVAR_vertex_normal, 3, GL_FLOAT, GL_FALSE, 0, 0);\n\n\t//glBindBuffer(GL_ARRAY_BUFFER, mesh.vertexTextureBufferID);\n\t//shader->setVertexAttributePointer(SHADERVAR_vertex_texture, 2, GL_FLOAT, GL_FALSE, 0, 0);\n\n\tglBindBuffer(GL_ARRAY_BUFFER, mesh.vertexColorBufferID);\n\tshader->setVertexAttributePointer(SHADERVAR_vertex_color, 3, GL_FLOAT, GL_FALSE, 0, 0);\n\n\n\t//glActiveTexture(GL_TEXTURE0);\n\t//int materialLocation = ((MaterialLibrary *)util::AssetManager::getAssetManager()->getAsset(mesh.mtlAssetId))->getMaterial(mesh.materialId)->map_Kd;\n\t//((DDSImage *)util::AssetManager::getAssetManager()->getAsset(materialLocation))->bindTexture();\n\n\t//glUniform1i(shaders::program_modelTexture_myTextureSampler, 0);\n\n\t//glVertexAttribPointer(uvloc, 2, GL_FLOAT, GL_FALSE, 0, 0);\n\n\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mesh.indexBufferID);\n\tglCullFace(GL_FRONT);\n\tglDrawElements(GL_TRIANGLES, mesh.indecies.size(), GL_UNSIGNED_INT, 0);\n\tglCullFace(GL_BACK);\n\n\tdelete [] vertexPositionData;\n}\nstd::ostream &operator<<(std::ostream &ost, const render::MD5Joint &joint)\n{\n\tost << \"\\t\\\"\" << joint.name << \"\\\" \" << joint.parent << std::endl;\n\treturn ost;\n}\nstd::ostream &operator<<(std::ostream &ost, const render::MD5Mesh &mesh)\n{\n\tost << \"\\t\" << \"[Material#\" << mesh.materialId << \":\" << mesh.materialId << \"]\" << mesh.verts.size() << \" verticies, \" << mesh.indecies.size() << \" triangles, \" << mesh.weights.size() << \" weights\";\n\treturn ost;\n}\nstd::ostream &operator<<(std::ostream &ost, const render::MD5Vertex &vert)\n{\n\tost << vert.index << \", \" << vert.startWeight << \"-\" << 
(vert.startWeight+vert.countWeight) << \" (\" << vert.countWeight << \" weights)\";\n\treturn ost;\n}\nstd::ostream &operator<<(std::ostream &ost, const render::MD5Weight &weight)\n{\n\tost << weight.index << \" \" << weight.joint << \" \" << weight.bias << \" ( \" << weight.pos.x << \" \" << weight.pos.y << \" \" << weight.pos.z << \" )\";\n\treturn ost;\n}\n" }, { "alpha_fraction": 0.7206053733825684, "alphanum_fraction": 0.7229336500167847, "avg_line_length": 25.030303955078125, "blob_id": "433a177a54a2e79548dd659d1e5a0a518c776f03", "content_id": "9ed7ac74580783b3dd4e00cb1473ae5a53710cf5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 859, "license_type": "no_license", "max_line_length": 65, "num_lines": 33, "path": "/src/cpp/world/entities/Turret.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_WORLD_ENTITIES_TURRET_HPP_\n#define FYP_WORLD_ENTITIES_TURRET_HPP_\n\n#include \"world/Entity.hpp\"\n#include \"render/SkeletalModel.hpp\"\n#include \"render/SkeletalAnimation.hpp\"\n#include \"ai/PerceptionManager.hpp\"\n#include \"ai/AimBot.hpp\"\n\nnamespace world {\n\tnamespace entities {\n\t\tclass Turret : public Entity {\n\t\t\ttypedef Entity super;\n\t\t\tprivate:\n\t\t\t\tglm::vec3 targetPosition;\n\t\t\t\trender::SkeletalModel *model;\n\t\t\t\trender::SkeletalAnimation *initAnimation;\n\t\t\t\tai::PerceptionManager *perception;\n\t\t\t\tai::AimBot *aimBot;\n\t\t\t\tdouble animationDuration,animationCurrent;\n\t\t\t\tbool initAnimating;\n\t\t\tpublic:\n\t\t\t\tTurret();\n\t\t\t\tvirtual ~Turret();\n\t\t\t\tvirtual void tick(util::DeltaTime &deltaTime);\n\t\t\t\tvirtual void render(render::RenderManager &rManager);\n\t\t\t\tvirtual void die(double time, glm::vec3 direction, int type);\n\t\t\t\tbool selector;\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.6751101613044739, "alphanum_fraction": 0.6811674237251282, "avg_line_length": 16.631067276000977, "blob_id": "28068d103b8624ead144df492294a76e441b7088", "content_id": "75199ff750f3632e61a7bda53aedffa2f0a24bfc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1816, "license_type": "no_license", "max_line_length": 83, "num_lines": 103, "path": "/src/cpp/gui/Element.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/Globals.hpp\"\n#include \"util/gl.h\"\n#include \"render/BasicShapes.hpp\"\n#include \"render/shaders/ShaderUtils.hpp\"\n#include \"render/RenderManager.hpp\"\n\n#include \"Element.hpp\"\n\nusing namespace render;\n\nElement::Element()\n{\n\tthis->x = 0;\n\tthis->y = 0;\n\tthis->width = 0;\n\tthis->height = 0;\n\tthis->screen = 0;\n\tthis->next = 0;\n\tthis->prev = 0;\n\tthis->left = 0;\n\tthis->right = 0;\n\tthis->up = 0;\n\tthis->down = 0;\n\tthis->selected = false;\n}\nElement::~Element()\n{\n\t\n}\nfloat Element::getX()\n{\n\treturn this->x;\n}\nfloat Element::getY()\n{\n\treturn this->y;\n}\nfloat Element::getWidth()\n{\n\treturn this->width;\n}\nfloat Element::getHeight()\n{\n\treturn this->height;\n}\nElement *Element::setX(float x)\n{\n\tthis->x = x;\n\tthis->onElementResize();\n\treturn this;\n}\nElement *Element::setY(float y)\n{\n\tthis->y = y;\n\tthis->onElementResize();\n\treturn this;\n}\nElement *Element::setWidth(float width)\n{\n\tthis->width = width;\n\tthis->onElementResize();\n\treturn this;\n}\nElement *Element::setHeight(float height)\n{\n\tthis->height = height;\n\tthis->onElementResize();\n\treturn 
this;\n}\nElement *Element::setSize(float x, float y, float width, float height)\n{\n\tthis->x = x;\n\tthis->y = y;\n\tthis->width = width;\n\tthis->height = height;\n\tthis->onElementResize();\n\treturn this;\n}\nbool Element::isInside(float x, float y)\n{\n\treturn this->x<=x && this->x+this->width>=x\n\t && this->y<=y && this->y+this->height>=y;\n}\nvoid Element::render(util::DeltaTime &deltaTime, render::RenderManager &manager)\n{\n\tmanager.useShader(SHADER_solidColor);\n\t\n\tBasicShapes::renderUnitSquare(manager.getVertexPosition());\n}\nvoid Element::onElementResize()\n{\n}\nbool Element::onControlEvent(int control, int action)\n{\n\treturn false;\n}\nbool Element::onControlEvent(int control, double x, double y, double dx, double dy)\n{\n\treturn false;\n}\nvoid Element::onScreenResize()\n{\n}\n" }, { "alpha_fraction": 0.6688064932823181, "alphanum_fraction": 0.6728482842445374, "avg_line_length": 23.453489303588867, "blob_id": "59358525c90d0b5429122cd4626f80ebcd7e592a", "content_id": "4b5a2459f3f0c65d043698aaa52ee19da68e4879", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4206, "license_type": "no_license", "max_line_length": 142, "num_lines": 172, "path": "/src/cpp/ai/path/PathFinder.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <glm/gtx/norm.hpp>\n\n#include \"PathFinder.hpp\"\n\nusing namespace ai::path;\n\nPathFinder::PathFinder()\n{\n\tthis->startPathNode = 0;\n\tthis->storedPathNodes.clear();\n\tthis->done = false;\n\tthis->sucess = false;\n}\nPathFinder::~PathFinder()\n{\n\tfor(auto &a : this->storedPathNodes)\n\t\tdelete a.second;\n}\nvoid PathFinder::start(PathNode *a, PathNode *b)\n{\n\tthis->iterations = 0;\n\t// Remember the algorithm works backwards\n\tthis->startPathNode = this->getStoredNode(b);\n\tthis->endPathNode = this->getStoredNode(a);\n\n\tthis->startPathNode->f = glm::distance(this->startPathNode->node->position, this->endPathNode->node->position);\n\tthis->endPathNode->f = 0;\n\n\tthis->openSet.clear();\n\tthis->closedSet.clear();\n\tthis->openSet.insert(this->startPathNode);\n\ta->current = true;\n\tb->current = true;\n\tthis->done = false;\n\tthis->sucess = false;\n}\nbool PathFinder::tick(int ticks)\n{\n\tif(this->startPathNode==0)\n\t\treturn false;\n\tif(this->done)\n\t\treturn true;\n\tif(ticks!=1)\n\t{\n\t\tfor(int i=1;i<ticks;i++)\n\t\t{\n\t\t\tif(this->tick(1))\n\t\t\t\treturn true;\n\t\t}\n\t}\n\tthis->iterations++;\n\tthis->c = 0;\n\t// Find a node in the open set with the lowest f cost\n\tif(!this->openSet.empty())\n\t{\n\t\tfloat min = std::numeric_limits<float>::max();\n\t\tfor(std::set<StoredPathNode *>::iterator openSetIterator = this->openSet.begin(); openSetIterator != this->openSet.end(); openSetIterator++)\n\t\t{\n\t\t\tStoredPathNode *s_node = *openSetIterator;\n\t\t\tif(s_node->f<min)\n\t\t\t{\n\t\t\t\tmin = s_node->f;\n\t\t\t\tc = s_node;\n\t\t\t}\n\t\t}\n\t}\n\t// Whats the new node we found in open set\n\tif(c==0)\n\t{\n\t\t// Done: Failed\n\t\tthis->done = true;\n\t\tthis->sucess = false;\n\t\treturn true;\n\t}\n\tif(c==this->endPathNode)\n\t{\n\t\t// Done: Complete path\n\t\tthis->done = true;\n\t\tthis->sucess = true;\n\t\tc = this->endPathNode;\n\t\twhile(c!=this->startPathNode)\n\t\t{\n\t\t\tc->node->current = true;\n\t\t\tc = c->parent;\n\t\t}\n\t\treturn true;\n\t}\n\tthis->openSet.erase(c);\n\tthis->closedSet.insert(c);\n\tc->node->open = false;\n\tc->node->closed = true;\n\tfloat newNeighbourG;\n\t// Add its parents to the open 
set\n\tfor(std::vector<PathNodeLink *>::iterator pnLinkIt = c->node->links.begin(); pnLinkIt!=c->node->links.end(); ++pnLinkIt)\n\t{\n\t\tPathNodeLink *pnLink = *pnLinkIt;\n\t\t// Get neighbour\n\t\tPathNode *otherNode = pnLink->a;\n\t\tif(otherNode==c->node)\n\t\t\totherNode = pnLink->b;\n\t\t// Get neighbour node\n\t\tStoredPathNode *neighbourNode = getStoredNode(otherNode);\n\t\t// Skip neighbours in closed set\n\t\tif(this->closedSet.find(neighbourNode)!=this->closedSet.end())\n\t\t\tcontinue;\n\t\tnewNeighbourG = c->g+static_cast<float>(pnLink->dist)/10+calculateDangerOffset(otherNode);\n\t\t// Add to open set if not already in\n\t\tif(this->openSet.find(neighbourNode)==this->openSet.end())\n\t\t{\n\t\t\tthis->openSet.insert(neighbourNode);\n\t\t\tneighbourNode->node->open = true;\n\t\t}\n\t\t// Determine if this path is worse\n\t\telse\n\t\t{\n\t\t\tif(newNeighbourG >= neighbourNode->g)\n\t\t\t\tcontinue;//Skip worse path\n\t\t}\n\t\t// Set up neighbour costs\n\t\tneighbourNode->parent = c;\n\t\tneighbourNode->parentLink = pnLink;\n\t\tneighbourNode->g = newNeighbourG;\n\t\tneighbourNode->h = glm::distance(neighbourNode->node->position, this->endPathNode->node->position);\n\t\tneighbourNode->f = neighbourNode->g+neighbourNode->h;\n\t}\n\treturn false;\n}\nfloat PathFinder::calculateDangerOffset(PathNode *pathNode)\n{\n\tfloat f;\n\tfor(auto dp : this->dangerPoints)\n\t{\n\t\tif(glm::distance2(pathNode->position, dp)<dangerRange*dangerRange)\n\t\t{\n\t\t\tf += ((this->dangerRange-glm::distance(pathNode->position, dp))/this->dangerRange)*this->dangerCost;\n\t\t}\n\t}\n\treturn f;\n}\nstd::vector<int> PathFinder::getPath()\n{\n\tstd::vector<int> path;\n\tthis->c = this->endPathNode;\n\tpath.push_back(this->c->node->id);\n\twhile(this->c!=this->startPathNode)\n\t{\n\t\tthis->c = this->c->parent;\n\t\tpath.push_back(this->c->node->id);\n\t}\n\treturn path;\n}\nStoredPathNode *PathFinder::getStoredNode(PathNode *node)\n{\n\tif(this->storedPathNodes.find(node->id)==this->storedPathNodes.end())\n\t{\n\t\tStoredPathNode *s_node;\n\t\ts_node = new StoredPathNode;\n\t\ts_node->parent = 0;\n\t\ts_node->node = node;\n\t\ts_node->f = 0;\n\t\ts_node->g = 0;\n\t\tthis->storedPathNodes[node->id] = s_node;\n\t\treturn s_node;\n\t}\n\treturn this->storedPathNodes[node->id];\n}\nPathNode *PathFinder::getCurrentNode()\n{\n\tif(this->c==0)\n\t\treturn 0;\n\treturn this->c->node;\n}\n" }, { "alpha_fraction": 0.7115384340286255, "alphanum_fraction": 0.7115384340286255, "avg_line_length": 12, "blob_id": "7dde199811a09088900dee5208f846f5999a7afb", "content_id": "ed62fc440521fdc936445a6a0bc38a8411e902ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 52, "license_type": "no_license", "max_line_length": 21, "num_lines": 4, "path": "/src/cpp/main.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_MAIN_HPP_\n#define FYP_MAIN_HPP_\n\n#endif\n" }, { "alpha_fraction": 0.7424511313438416, "alphanum_fraction": 0.7436352968215942, "avg_line_length": 30.867923736572266, "blob_id": "84cd5c8ca3b5748f1c71cc330098b1c391441440", "content_id": "d4b821daf8b7164a8886a01e20601fe4398bcf6d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1689, "license_type": "no_license", "max_line_length": 255, "num_lines": 53, "path": "/src/cpp/render/shaders/ShaderProgram.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef 
FYP_RENDER_SHADERS_SHADERPROGRAM_HPP_\n#define FYP_RENDER_SHADERS_SHADERPROGRAM_HPP_\n\nnamespace render {\n\tclass RenderManager;\n}\n\n#include <string>\n#include <cstdint>\n#include <vector>\n\n#include \"render/shaders/ShaderVariables.h\"\n#include \"util/gl.h\"\n#include \"render/MaterialLibrary.hpp\"\n\n\nnamespace render {\n\tnamespace shaders {\n\t\ttypedef std::int32_t ShaderMask;\n\t\tconst std::vector<std::string> shaderNames = {SHADERVAR_NAMES};\n\t\tclass ShaderProgram {\n\t\t\tpublic:\n\t\t\t\tShaderProgram(int shaderprogram_id, std::string shaderprogram_name, std::vector<GLint> shaderFiles, ShaderMask shaderVar_mask, ShaderMask shaderVar_overrideMask, std::vector<std::string> shaderVar_overrides, std::vector<std::string> shaderVar_custom);\n\t\t\t\t~ShaderProgram();\n\t\t\t\tstatic void loadShaders();\n\t\t\t\tstatic ShaderProgram *getShader(int shaderIndex);\n\t\t\t\tvoid useShader();\n\t\t\t\tGLint getShaderLocation(bool uniform, int shaderVar);\n\t\t\t\tbool setVertexAttributePointer(int shaderVar, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer);\n\t\t\t\tbool setMaterial(render::MaterialAsset mat);\n\t\t\tprivate:\n\t\t\t\tint shaderprogram_id;\n\t\t\t\tstd::string shaderprogram_name;\n\t\t\t\tstd::vector<GLint> shaderFiles;\n\t\t\t\tShaderMask shaderVar_mask;\n\t\t\t\tShaderMask shaderVar_overrideMask;\n\t\t\t\tstd::vector<std::string> shaderVar_overrides;\n\t\t\t\tstd::vector<std::string> shaderVar_custom;\n\t\t\t\t// GL spec\n\t\t\t\tGLuint programID;\n\t\t\t\tint shaderVar_locationsCount;\n\t\t\t\tGLint *shaderVar_locations;\n\n\t\t\t\tMaterialAsset currentMaterial;\n\n\t\t\t\tstatic GLint compileShader(std::string name, int mask, std::string code);\n\t\t\t\tstatic GLint *shader_files;\n\t\t\t\tstatic ShaderProgram **shader_program;\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.7241379022598267, "alphanum_fraction": 0.7241379022598267, "avg_line_length": 13.5, "blob_id": "98454a27b12f80b05e9142a466ab9aec506635a0", "content_id": "701afae53bfdb89cce19fa1e662af434d455b1ab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 174, "license_type": "no_license", "max_line_length": 40, "num_lines": 12, "path": "/src/cpp/util/Globals.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_UTIL_GLOBALS_HPP_\n#define FYP_UTIL_GLOBALS_HPP_\n\n#include <iostream>\n\nnamespace util {\n\tnamespace Globals {\n\t\tvoid fatalError(std::string errorMsg);\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.781619668006897, "alphanum_fraction": 0.7989081144332886, "avg_line_length": 29.52777862548828, "blob_id": "97c39fccc765174e471cc0f602325a19cda288de", "content_id": "fbc618cc519688b0aeb0177d7eb338f90f200d90", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1099, "license_type": "no_license", "max_line_length": 87, "num_lines": 36, "path": "/src/cpp/input/Controls.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_INPUT_CONTROLS_HPP_\n#define FYP_INPUT_CONTROLS_HPP_\n\n#include \"util/gl.h\"\n\ntypedef int Control;\n\n// Actions: bits 1,2\n#define CONTROL_ACTION_PRESS 1\n#define CONTROL_ACTION_RELEASE 2\n#define CONTROL_ACTION_REPEAT 3\n\n// Action sources: bits 3,4,5\n#define CONTROL_ACTION_KEY 4\n#define CONTROL_ACTION_MOUSE 8\n#define CONTROL_ACTION_SCROLL 16\n\n#define CONTROL_KEYACTION_PRESS (CONTROL_ACTION_KEY+CONTROL_ACTION_PRESS)\n#define CONTROL_KEYACTION_RELEASE 
(CONTROL_ACTION_KEY+CONTROL_ACTION_RELEASE)\n#define CONTROL_KEYACTION_REPEAT (CONTROL_ACTION_KEY+CONTROL_ACTION_REPEAT)\n\n#define CONTROL_MOUSEBUTTONACTION_PRESS (CONTROL_ACTION_MOUSE+CONTROL_ACTION_PRESS)\n#define CONTROL_MOUSEBUTTONACTION_RELEASE (CONTROL_ACTION_MOUSE+CONTROL_ACTION_RELEASE)\n#define CONTROL_MOUSEBUTTONACTION_REPEAT (CONTROL_ACTION_MOUSE+CONTROL_ACTION_REPEAT)\n\n#define CONTROL_GUI_NEXT 0\n#define CONTROL_GUI_PREV 1\n#define CONTROL_GUI_UP 2\n#define CONTROL_GUI_DOWN 3\n#define CONTROL_GUI_LEFT 4\n#define CONTROL_GUI_RIGHT 5\n\n#define CONTROL_GUI_SELECT GLFW_MOUSE_BUTTON_1\n#define CONTROL_GUI_ESCAPE GLFW_KEY_ESCAPE\n\n#endif\n" }, { "alpha_fraction": 0.6830986142158508, "alphanum_fraction": 0.70466548204422, "avg_line_length": 31.927536010742188, "blob_id": "231be098ac2616f21131f3be7211cf16851bf0c2", "content_id": "df0e375a50eb08fbadabdabdfa54b022e614bec6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2272, "license_type": "no_license", "max_line_length": 150, "num_lines": 69, "path": "/src/cpp/ai/AimBot.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <cmath>\n#include \"world/World.hpp\"\n#include \"util/Boundaries.hpp\"\n#include \"render/BasicShapes.hpp\"\n\n#include \"AimBot.hpp\"\n\nusing namespace ai;\nusing namespace world;\nusing namespace util::Boundaries;\n\nAimBot::AimBot(ai::PerceptionManager *perception)\n{\n\tthis->perception = perception;\n\tthis->bulletSpread = glm::radians(2.f);\n\tthis->bulletLifespan = 0.02f;\n\tthis->bulletFireDelay = 0.43f;\n\tthis->lastBulletFiredTime = -1.f;\n\tthis->perceptionFireDelay = 1.f;\n\n\t//this->bulletLifespan = 0.001f;\n\t//this->bulletFireDelay = 0.001f;\n\t//this->bulletLifespan = this->bulletFireDelay;\n}\nAimBot::~AimBot()\n{\n\t\n}\nvoid AimBot::tick(util::DeltaTime &deltaTime)\n{\n\tif(this->perception->getPerceivedEntity()!=0 && (this->lastBulletFiredTime<0 || deltaTime.getTime()>this->lastBulletFiredTime+this->bulletFireDelay))\n\t{\n\t\tRaycast ray;\n\t\tray.origin = this->perception->getEyePosition();\n\t\tray.direction = (\n\t\t\t\t\tthis->perception->getOrientation()\n\t\t\t\t\t*glm::angleAxis(static_cast<float>(std::fmod(std::rand(),this->bulletSpread*2)-this->bulletSpread), glm::vec3(1,0,0))\n\t\t\t\t\t*glm::angleAxis(static_cast<float>(std::fmod(std::rand(), this->bulletSpread*2)-this->bulletSpread), glm::vec3(0,0,1))\n\t\t\t\t)*glm::vec3(0,1,0);\n\t\tray.maxDistance = 0;\n\t\tEntity *e = this->perception->getOriginEntity();\n\t\tRaycastResult result = this->perception->getOriginEntity()->getWorld().rayCast(ray, &e);\n\t\tBullet *bullet = new Bullet;\n\t\tthis->lastBulletFiredTime = bullet->startTime = deltaTime.getTime();\n\t\tbullet->start = ray.origin;\n\t\tbullet->end = ray.origin+ray.direction*(result.hit ? 
result.distance : 1000.f);\n\t\tthis->bullets.push_back(bullet);\n\t}\n\tfor(std::list<Bullet *>::iterator it = this->bullets.begin();it!=this->bullets.end();it++)\n\t{\n\t\tBullet *b = *it;\n\t\tif(deltaTime.getTime()-b->startTime>this->bulletLifespan)\n\t\t{\n\t\t\tit = this->bullets.erase(it);\n\t\t\tdelete b;\n\t\t}\n\t}\n}\nvoid AimBot::render(render::RenderManager &rManager)\n{\n\trManager.M = glm::mat4(1.0f);\n\trManager.markMDirty();\n\trender::shaders::ShaderProgram *shader = rManager.useShader(SHADER_solidColor);\n\tglUniform4f(shader->getShaderLocation(true, SHADER_solidColor_solidColor), 0.8f, 0.8f, 0.6f, 1.f);\n\tfor(auto *b : this->bullets)\n\t{\n\t\trender::BasicShapes::drawLine(b->start, b->end, rManager.getVertexPosition());\n\t}\n}\n" }, { "alpha_fraction": 0.7477272748947144, "alphanum_fraction": 0.7530303001403809, "avg_line_length": 25.93877601623535, "blob_id": "eebee28da41c8d5de739b01ec0ada2437c1d449a", "content_id": "ad14f3a126b88995a7451318983dd1f9b3783a7c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1320, "license_type": "no_license", "max_line_length": 118, "num_lines": 49, "path": "/src/cpp/render/Font.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_RENDER_FONT_HPP_\n#define FYP_RENDER_FONT_HPP_\n\nnamespace render {\n\tclass Font;\n}\n\n#include <ft2build.h>\n#include FT_FREETYPE_H\n#include <cstdarg>\n#include <string>\n#include \"util/gl.h\"\n#include \"RenderManager.hpp\"\n#include \"shaders/ShaderProgram.hpp\"\n\nnamespace render {\n\tstruct GlyphMetrics {\n\t\tfloat currentPixelSize;\n\t\tint top,left,width,height,advanceX,advanceY;\n\t\tbool textureLoaded;\n\t};\n\t\n\tclass Font {\n\t\tpublic:\n\t\t\tFont(std::string fontfamily, float heightMM);\n\t\t\t~Font();\n\t\t\t\n\t\t\tvoid printf(std::string text, render::RenderManager &rManager);\n\t\t\tfloat getTextWidth(std::string text, render::RenderManager &rManager);\n\t\t\tvoid setHeight(float heightMM);\n\t\t\tvoid setColor(float r, float g, float b, float a);\n\t\tprivate:\n\t\t\tshaders::ShaderProgram *shader;\n\t\t\tstatic bool libraryInit;\n\t\t\tstatic FT_Library library;\n\t\t\tGLint vertexPositionAttribute,vertexTextureAttribute,uniformTextColor,uniformTexture;\n\n\t\t\tFT_Face *face;\n\t\t\tstruct GlyphMetrics metrics[256];\n\t\t\tfloat heightMM;\n\t\t\tint heightPX;\n\t\t\tGLuint fontFaceVertexArrayObjectID, fontFaceTextures[256], fontFaceTextureCoordBufferID, fontFaceTextureUVBufferID;\n\t\t\tfloat r,g,b,a;\n\t\t\tstruct GlyphMetrics *getGlyphMetrics(char c, int calculatedPixelSize);\n\t\t\tstruct GlyphMetrics *getGlyphMetrics_NoTexture(char c, int calculatedPixelSize);\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.699999988079071, "alphanum_fraction": 0.699999988079071, "avg_line_length": 9, "blob_id": "0843311e947acffbc4ee60765fe25cc5f2f9001c", "content_id": "0bb7e3fca7ab77ee9adbba3b165b6a07afa5016c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 100, "license_type": "no_license", "max_line_length": 29, "num_lines": 10, "path": "/src/cpp/gui/elements/PulldownMenu.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"PulldownMenu.hpp\"\n\nPulldownMenu::PulldownMenu()\n{\n\t\n}\nPulldownMenu::~PulldownMenu()\n{\n\t\n}\n" }, { "alpha_fraction": 0.7015887498855591, "alphanum_fraction": 0.7169007658958435, "avg_line_length": 36.60173034667969, "blob_id": "0ba5e943f490382d42b75fb6f55d2c02d4ff6136", 
"content_id": "ff23631956ae12e88942c0ab77d95160e92e25aa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 8686, "license_type": "no_license", "max_line_length": 285, "num_lines": 231, "path": "/src/cpp/render/StaticModel.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/Globals.hpp\"\n#include <istream>\n#include <iostream>\n#include <unordered_map>\n#include \"util/StreamUtils.hpp\"\n#include \"util/gl.h\"\n#include \"render/RenderManager.hpp\"\n#include \"render/shaders/ShaderProgram.hpp\"\n#include \"render/MaterialLibrary.hpp\"\n#include \"util/AssetUtils.hpp\"\n#include \"util/Boundaries.hpp\"\n\n#include \"StaticModel.hpp\"\n\nusing namespace render;\nusing namespace util::StreamUtils;\nusing namespace util::Boundaries;\n\nStaticModel::StaticModel(int assetId, std::istream &fp) : Asset(assetId)\n{\n\tthis->setName(readString(fp));\n\tthis->bounds = new AABB();\n\t\n\t// int dataBufferStride = 0; - Field\n\tdataBufferStride = 3;\n\tdataBufferNormalsOffset = 0;\n\tdataBufferColorsOffset = 0;\n\tint vertexStride = 1;\n\tint vertexTexturesOffset = 0;\n\tint vertexNormalsOffset = 0;\n\tfloat *vertexPositions,*vertexTextures,*vertexNormals,*vertexColors;\n\t//std::vector<float> dataBuffer;//[[v[0], v[1], v[2], vt[0], vt[1], vn[0], vn[1], vn[2]]...]\n\tstd::unordered_map<struct FaceKey, int, FaceKeyHasher> assocMap;//[v index, vt index, vn index] : [faceIndex]\n\t\n\t//vertex positions\n\tlenVertexPositions = readInt(fp);\n\tvertexPositions = new float[lenVertexPositions*3];\n\tfp.read((char *)vertexPositions, sizeof(float)*lenVertexPositions*3);\n\t//vertex textures\n\tint lenVertexTextures = readInt(fp);\n\tif(lenVertexTextures>0)//enableTextures\n\t{\n\t\tvertexTextures = new float[lenVertexTextures*2];\n\t\tfp.read((char *)vertexTextures, sizeof(float)*lenVertexTextures*2);\n\t\tdataBufferStride += 2;\n\t\tvertexStride += 1;\n\t\tvertexTexturesOffset = 1;\n\t}\n\t//vertex normals\n\tint lenVertexNormals = readInt(fp);\n\tif(lenVertexNormals>0)//enableNormals\n\t{\n\t\tvertexNormals = new float[lenVertexNormals*3];\n\t\tfp.read((char *)vertexNormals, sizeof(float)*lenVertexNormals*3);\n\t\tdataBufferNormalsOffset = dataBufferStride;\n\t\tdataBufferStride += 3;\n\t\tvertexStride += 1;\n\t\tvertexNormalsOffset = vertexTexturesOffset+1;\n\t}\n\t//vertex colors\n\tint lenVertexColors = readInt(fp);\n\tif(lenVertexColors>0)//enableNormals\n\t{\n\t\tvertexColors = new float[lenVertexColors*3];\n\t\tfp.read((char *)vertexColors, sizeof(float)*lenVertexColors*3);\n\t\tdataBufferColorsOffset = dataBufferStride;\n\t\tdataBufferStride += 3;\n\t\tvertexStride += 1;\n\t\tvertexNormalsOffset = vertexTexturesOffset+1;\n\t}\n\t//objects\n\tint lenObjects = readInt(fp);\n\tstruct FaceKey faceKey;\n\tint currentVertexIndex = 0;\n\ttemp_totalVertexCount = 0;\n\tfloat minX = NAN;\n\tfloat minY = NAN;\n\tfloat minZ = NAN;\n\tfloat maxX = NAN;\n\tfloat maxY = NAN;\n\tfloat maxZ = NAN;\n\tfor(int i=0;i<lenObjects;i++)\n\t{\n\t\tStaticModelObject *o = new StaticModelObject;\n\t\t// Load the wavefront object\n\t\to->name = readString(fp);\n\t\to->mtlAsset = readInt(fp);\n\t\to->materialId = readInt(fp);\n\t\to->s = readBool(fp);\n\t\to->numPrimitives = readInt(fp);\n\t\tint numVerticies = o->numPrimitives*3;\n\t\tint *objectIndecies = new int[numVerticies*vertexStride];\n\t\to->indecies = new GLuint[numVerticies];\n\t\tfp.read((char *) objectIndecies, sizeof(int)*numVerticies*vertexStride);\n\t\t// Store the 
faces\n\t\ttemp_totalVertexCount+=numVerticies;\n\t\tfor(int v=0;v<numVerticies;v++)\n\t\t{\n\t\t\t// Load the vertex indexes for the components of the vertex\n\t\t\tfaceKey.vertexPositionIndex = objectIndecies[v*vertexStride]-1;\n\t\t\tfaceKey.vertexTextureIndex = (lenVertexTextures>0 ? objectIndecies[v*vertexStride+vertexTexturesOffset]-1 : -1);\n\t\t\tfaceKey.vertexNormalIndex = (lenVertexNormals>0 ? objectIndecies[v*vertexStride+vertexNormalsOffset]-1 : -1);\n\t\t\t\n\t\t\tstd::unordered_map<struct FaceKey, int, FaceKeyHasher>::const_iterator findFace = assocMap.find(faceKey);\n\t\t\tif(findFace==assocMap.end())//If vertex is not already associated\n\t\t\t{\n\t\t\t\t// Associate the store the new index\n\t\t\t\tassocMap[faceKey] = currentVertexIndex;\n\t\t\t\to->indecies[v] = currentVertexIndex;\n\t\t\t\tcurrentVertexIndex++;\n\t\t\t\t// Get the vertex data\n\t\t\t\tfloat &vX = vertexPositions[faceKey.vertexPositionIndex*3+0];\n\t\t\t\tfloat &vY = vertexPositions[faceKey.vertexPositionIndex*3+1];\n\t\t\t\tfloat &vZ = vertexPositions[faceKey.vertexPositionIndex*3+2];\n\t\t\t\t// Update the min/max\n\t\t\t\tif(minX!=minX || vX<minX) minX = vX-0.001f;\n\t\t\t\tif(minY!=minY || vY<minY) minY = vY-0.001f;\n\t\t\t\tif(minZ!=minZ || vZ<minZ) minZ = vZ-0.001f;\n\t\t\t\tif(maxX!=maxX || vX>maxX) maxX = vX+0.001f;\n\t\t\t\tif(maxY!=maxY || vY>maxY) maxY = vY+0.001f;\n\t\t\t\tif(maxZ!=maxZ || vZ>maxZ) maxZ = vZ+0.001f;\n\t\t\t\tdataBuffer.push_back(vX);\n\t\t\t\tdataBuffer.push_back(vY);\n\t\t\t\tdataBuffer.push_back(vZ);\n\t\t\t\t// Put the vertex data into the vertex buffer\n\t\t\t\tif(lenVertexTextures>0)\n\t\t\t\t{\n\t\t\t\t\tdataBuffer.push_back(vertexTextures[faceKey.vertexTextureIndex*2+0]);\n\t\t\t\t\tdataBuffer.push_back(vertexTextures[faceKey.vertexTextureIndex*2+1]);\n\t\t\t\t}\n\t\t\t\tif(lenVertexNormals>0)\n\t\t\t\t{\n\t\t\t\t\tdataBuffer.push_back(vertexNormals[faceKey.vertexNormalIndex*3+0]);\n\t\t\t\t\tdataBuffer.push_back(vertexNormals[faceKey.vertexNormalIndex*3+1]);\n\t\t\t\t\tdataBuffer.push_back(vertexNormals[faceKey.vertexNormalIndex*3+2]);\n\t\t\t\t}\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\t// Store the existing vertex index in the new index buffer\n\t\t\t\to->indecies[v] = findFace->second;\n\t\t\t}\n\t\t}\n\t\tdelete [] objectIndecies;\n\t\tthis->objects.push_back(o);\n\t}\n\tthis->bounds->boxCenter[0] = (maxX+minX)/2.f;\n\tthis->bounds->boxCenter[1] = (maxY+minY)/2.f;\n\tthis->bounds->boxCenter[2] = (maxZ+minZ)/2.f;\n\tthis->bounds->boxHalfSize[0] = this->bounds->boxCenter[0]-minX;\n\tthis->bounds->boxHalfSize[1] = this->bounds->boxCenter[1]-minY;\n\tthis->bounds->boxHalfSize[2] = this->bounds->boxCenter[2]-minZ;\n\t//cleanup\n\tdelete [] vertexPositions;\n\tif(lenVertexTextures>0)\n\t\tdelete [] vertexTextures;\n\tif(lenVertexNormals>0)\n\t\tdelete [] vertexNormals;\n\tif(lenVertexColors>0)\n\t\tdelete [] vertexColors;\n}\nStaticModel::~StaticModel()\n{\n\t\n}\nvoid StaticModel::postload()\n{\n\t// Load\n\tglGenVertexArrays(1, &this->vertexArrayID);\n\tglBindVertexArray(this->vertexArrayID);\n\t\n\tglGenBuffers(1, &this->vertexDataBufferID);\n\tglBindBuffer(GL_ARRAY_BUFFER, this->vertexDataBufferID);\n\tglBufferData(GL_ARRAY_BUFFER, this->dataBuffer.size()*sizeof(GLfloat), &this->dataBuffer[0], GL_STATIC_DRAW);\n\t\n\tGLfloat *colors = new GLfloat[temp_totalVertexCount*3];\n\tfor(int i=0;i<temp_totalVertexCount*3;i++)\n\t\tcolors[i] = (float)(std::rand()%100)/100;\n\tglGenBuffers(1, &tempColorBuffer);\n\tglBindBuffer(GL_ARRAY_BUFFER, 
this->tempColorBuffer);\n\tglBufferData(GL_ARRAY_BUFFER, temp_totalVertexCount*3*sizeof(GLfloat), colors, GL_STATIC_DRAW);\n\tdelete [] colors;\n\n\tfor(StaticModelObject *object : this->objects)\n\t{\n\t\tglGenBuffers(1, &object->indexBufferID);\n\t\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, object->indexBufferID);\n\t\tglBufferData(GL_ELEMENT_ARRAY_BUFFER, object->numPrimitives*3*sizeof(GLuint), object->indecies, GL_STATIC_DRAW);\n\t}\n}\nAABB &StaticModel::getBounds()\n{\n\treturn *this->bounds;\n}\nvoid StaticModel::render(render::RenderManager &rManager, int shader)\n{\n\tshaders::ShaderProgram *proc = rManager.useShader(shader);\n\tif(proc==0)\n\t{\n\t\tutil::Globals::fatalError(\"Failed to find shader\");\n\t}\n\t//glUniform1i(proc->getShaderLocation(true, SHADERVAR_material_map_Kd), 0);\n\n\tglBindVertexArray(this->vertexArrayID);\n\tglBindBuffer(GL_ARRAY_BUFFER, this->vertexDataBufferID);\n\n\t// Push the vertex attributes\n\tproc->setVertexAttributePointer(SHADERVAR_vertex_position, 3, GL_FLOAT, GL_FALSE, dataBufferStride*sizeof(GLfloat), 0);\n\tproc->setVertexAttributePointer(SHADERVAR_vertex_texture, 2, GL_FLOAT, GL_FALSE, dataBufferStride*sizeof(GLfloat), (void*)(3*sizeof(GLfloat)));\n\tproc->setVertexAttributePointer(SHADERVAR_vertex_normal, 3, GL_FLOAT, GL_FALSE, dataBufferStride*sizeof(GLfloat), (void*)(this->dataBufferNormalsOffset*sizeof(GLfloat)));\n\tproc->setVertexAttributePointer(SHADERVAR_vertex_color, 3, GL_FLOAT, GL_FALSE, dataBufferStride*sizeof(GLfloat), (void*)(this->dataBufferColorsOffset*sizeof(GLfloat)));\n\t\n\tfor(StaticModelObject *object : this->objects)\n\t{\n\t\tif(object->mtlAsset!=0)\n\t\t\tproc->setMaterial({object->mtlAsset, object->materialId});\n\n\t\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, object->indexBufferID);\n\t\tglDrawElements(GL_TRIANGLES, object->numPrimitives*3, GL_UNSIGNED_INT, 0);\n\t}\n}\nvoid StaticModel::write(std::ostream &ost) const\n{\n\tost << \"[\" << this->getAssetID() << \":\" << this->getName() << \".obj] \" << this->dataBuffer.size()/this->dataBufferStride << \" verticies by \" << this->dataBufferStride << \" attributes (\" << this->dataBuffer.size() << \" dbuf size), \" << this->objects.size() << \" objects:\" << std::endl;\n\tfor(render::StaticModelObject *o : this->objects)\n\t\tost << *o << std::endl;\n}\nstd::ostream &operator<<(std::ostream &ost, const render::StaticModelObject &o)\n{\n\treturn ost << \"\t\" << o.name << \": Material [\" << o.mtlAsset << \":\" << o.materialId << \"]\" << \", Shading: \" << o.s << \", \" << o.numPrimitives << \" primitives (\" << (o.numPrimitives*3) << \" verticies) \";\n}\n" }, { "alpha_fraction": 0.6535825729370117, "alphanum_fraction": 0.654828667640686, "avg_line_length": 24.078125, "blob_id": "1b9bab5d4fba7aaf5aaa9f96f673e8bd40cae711", "content_id": "1089a36b76ca251057c31be98461c0219b969cea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1605, "license_type": "no_license", "max_line_length": 84, "num_lines": 64, "path": "/src/cpp/util/StreamUtils.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#define __STRSKIP\n#include <iostream>\n#include \"StreamUtils.hpp\"\n\nusing namespace util;\n\n#ifdef ENABLE_STREAMUTILS_DEBUG\nstd::string StreamUtils::readString(const char *file, int line, std::istream &fp)\n#else\nstd::string StreamUtils::readString(std::istream &fp)\n#endif\n{\n\tint len;\n\tfp.read((char *)&len, sizeof(int));\n\tchar *buf = new char[len+1];\n\tfp.read(buf, len);\n\tbuf[len] = 
'\\0';\n#ifdef ENABLE_STREAMUTILS_DEBUG\n\tstd::cout << \"[\" << file << \":\" << line << \"]readString(fp): \" << buf << std::endl;\n#endif\n\tstd::string s = std::string(buf);\n\tdelete [] buf;\n\treturn s;\n}\n#ifdef ENABLE_STREAMUTILS_DEBUG\nint StreamUtils::readInt(const char *file, int line, std::istream &fp)\n#else\nint StreamUtils::readInt(std::istream &fp)\n#endif\n{\n\tint i;\n\tfp.read((char *)&i, sizeof(int));\n#ifdef ENABLE_STREAMUTILS_DEBUG\n\tstd::cout << \"[\" << file << \":\" << line << \"]readInt(fp): \" << i << std::endl;\n#endif\n\treturn i;\n}\n#ifdef ENABLE_STREAMUTILS_DEBUG\nfloat StreamUtils::readFloat(const char *file, int line, std::istream &fp)\n#else\nfloat StreamUtils::readFloat(std::istream &fp)\n#endif\n{\n\tfloat i;\n\tfp.read((char *)&i, sizeof(float));\n#ifdef ENABLE_STREAMUTILS_DEBUG\n\tstd::cout << \"[\" << file << \":\" << line << \"]readFloat(fp): \" << i << std::endl;\n#endif\n\treturn i;\n}\n#ifdef ENABLE_STREAMUTILS_DEBUG\nbool StreamUtils::readBool(const char *file, int line, std::istream &fp)\n#else\nbool StreamUtils::readBool(std::istream &fp)\n#endif\n{\n\tbool i;\n\tfp.read((char *)&i, sizeof(bool));\n#ifdef ENABLE_STREAMUTILS_DEBUG\n\tstd::cout << \"[\" << file << \":\" << line << \"]readBool(fp): \" << i << std::endl;\n#endif\n\treturn i;\n\n}\n" }, { "alpha_fraction": 0.7534883618354797, "alphanum_fraction": 0.7534883618354797, "avg_line_length": 14.357142448425293, "blob_id": "32e495b3e088f5adb1ed9c6e3d4e8d9a7d22b678", "content_id": "8df270c1c3228d48e3dae997c797ddd92b76ac3c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 215, "license_type": "no_license", "max_line_length": 42, "num_lines": 14, "path": "/src/cpp/gui/screens/ConfirmAction.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_SCREENS_CONFIRMACTION_HPP_\n#define FYP_GUI_SCREENS_CONFIRMACTION_HPP_\n\nclass ConfirmAction;\n\n// Include dependencies\n\nclass ConfirmAction {\n\tpublic:\n\t\tConfirmAction();\n\t\t~ConfirmAction();\n};\n\n#endif\n" }, { "alpha_fraction": 0.7274853587150574, "alphanum_fraction": 0.7333333492279053, "avg_line_length": 17.191490173339844, "blob_id": "4b8b4e9b7ab451a055d4765125ce65ddf1613415", "content_id": "b574c54feb86323075798ced27f5bef6b03f3ac3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 855, "license_type": "no_license", "max_line_length": 56, "num_lines": 47, "path": "/src/cpp/util/DeltaTime.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"DeltaTime.hpp\"\n\nusing namespace util;\n\nDeltaTime::DeltaTime(bool vsync, double targetFramerate)\n{\n\tthis->targetFramerate = targetFramerate;\n\tthis->currentTime = 0;\n\tthis->deltaTime = 0;\n\tthis->framerate = 0;\n\tthis->vsync = vsync;\n\tthis->offsetTime = 0;\n}\nDeltaTime::~DeltaTime()\n{\n\t\n}\nvoid DeltaTime::postTime(double time)\n{\n\tthis->deltaTime = time-this->currentTime;\n\tthis->framerate = 1/this->deltaTime;\n\tthis->currentTime = time;\n}\nvoid DeltaTime::setOffsetTime(double offsetTime)\n{\n\tthis->offsetTime = offsetTime;\n}\ndouble DeltaTime::getTime()\n{\n\treturn this->currentTime+this->offsetTime;\n}\ndouble DeltaTime::getTimeDelta()\n{\n\treturn this->deltaTime;\n}\ndouble DeltaTime::getFramerate()\n{\n\treturn this->framerate;\n}\ndouble DeltaTime::getTargetFramerate()\n{\n\treturn this->targetFramerate;\n}\nbool DeltaTime::isVsync()\n{\n\treturn this->vsync;\n}\n" }, { "alpha_fraction": 
0.6496815085411072, "alphanum_fraction": 0.6878980994224548, "avg_line_length": 14.699999809265137, "blob_id": "c26d1140fb8d2767c04787da0814416cf84f9e73", "content_id": "d0a0e21d4d4e42d262269103ed924c309bac83df", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 157, "license_type": "no_license", "max_line_length": 56, "num_lines": 10, "path": "/src/cpp/render/shaders/code/modelTexture_fragment.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//fragment\n\nin vec2 UV;\nimport uniform sampler2D material_map_Kd;\n\nout vec4 color;\n\nvoid main() {\n\tcolor = vec4(texture( material_map_Kd, UV ).rgb, 1.0f);\n}\n" }, { "alpha_fraction": 0.7652645707130432, "alphanum_fraction": 0.767978310585022, "avg_line_length": 27.346153259277344, "blob_id": "aa55e9ca12251d869831802cc284e107158220a3", "content_id": "ac0d4bbca4b1ede8e82c1e47fc76dcc38b57349c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 737, "license_type": "no_license", "max_line_length": 88, "num_lines": 26, "path": "/src/cpp/input/controls/PlayerGameControls.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_INPUT_CONTROLS_PLAYERGAMECONTROLS_HPP_\n#define FYP_INPUT_CONTROLS_PLAYERGAMECONTROLS_HPP_\n\nnamespace world {\n\tclass Entity;\n}\n\n#include \"input/ControlScheme.hpp\"\n#include \"world/collisions/StaticMesh.hpp\"\n\nnamespace controls {\n\tclass PlayerGameControls : public ControlScheme {\n\t\tprivate:\n\t\t\tdouble lastCursorX,lastCursorY,cursorX,cursorY,cursorDeltaX,cursorDeltaY;\n\t\t\tbool lastCursor;\n\t\t\tworld::collisions::StaticMesh *world;\n\t\t\tglm::vec3 velocity;\n\t\tpublic:\n\t\t\tPlayerGameControls(world::Entity *entity, world::collisions::StaticMesh *staticMesh);\n\t\t\tvirtual ~PlayerGameControls();\n\t\t\t//virtual void tick(util::DeltaTime &deltaTime);\n\t\t\tvoid tick2(render::RenderManager *rManager, util::DeltaTime &deltaTime);\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.7071428298950195, "alphanum_fraction": 0.7071428298950195, "avg_line_length": 13, "blob_id": "c42f034fbe07787c2240e082bdba77d639bac604", "content_id": "50d78a850789258fd755098aecf917d96b949297", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 140, "license_type": "no_license", "max_line_length": 31, "num_lines": 10, "path": "/src/cpp/ai/GoalManager.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_GOALMANAGER_HPP_\n#define FYP_AI_GOALMANAGER_HPP_\n\nclass GoalManager {\n\tpublic:\n\t\tGoalManager();\n\t\t~GoalManager();\n};\n\n#endif\n" }, { "alpha_fraction": 0.75, "alphanum_fraction": 0.75, "avg_line_length": 25.66666603088379, "blob_id": "c4fe34fe29ba69a2f1c02c4a08e00c789a97474c", "content_id": "01423a8c3ed84bdae6abe8b1649197bc1527a77d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 560, "license_type": "no_license", "max_line_length": 82, "num_lines": 21, "path": "/src/cpp/gui/screens/PathTest.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_SCREENS_PATHTEST_HPP_\n#define FYP_GUI_SCREENS_PATHTEST_HPP_\n\n#include \"ai/path/DebugPathHolder.hpp\"\n#include \"ai/path/PathFinder.hpp\"\n#include \"gui/Screen.hpp\"\n#include \"render/RenderManager.hpp\"\n#include \"util/DeltaTime.hpp\"\n\nclass PathTest : public Screen 
{\n\tpublic:\n\t\tPathTest();\n\t\t~PathTest();\n\t\tvirtual void render(util::DeltaTime &deltaTime, render::RenderManager &manager);\n\t\tai::path::DebugPathHolder *pathHolder;\n\t\tai::path::PathFinder *pathFinder;\n\t\tdouble lastTime;\n\t\tvirtual bool onControlEvent(int control, int action);\n};\n\n#endif\n" }, { "alpha_fraction": 0.7050847411155701, "alphanum_fraction": 0.7093220353126526, "avg_line_length": 22.13725471496582, "blob_id": "5d76530b83b6288909d85f4b3b04470ef7877278", "content_id": "85e52649b58ba389273e91687eb1c75803fe877f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1180, "license_type": "no_license", "max_line_length": 76, "num_lines": 51, "path": "/src/cpp/gui/screens/MainMenu.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"gui/ScreenManager.hpp\"\n#include \"gui/screens/PathTest.hpp\"\n#include \"gui/screens/GameView.hpp\"\n#include \"gui/elements/Button.hpp\"\n\n#include \"MainMenu.hpp\"\n\nusing namespace screens;\n\nMainMenu::MainMenu() : Screen()\n{\n\tthis->playButton = new Button(\"Play\");\n\tthis->pathTestButton = new Button(\"???\");\n\tthis->exitButton = new Button(\"Quit\");\n\n\tthis->playButton->setY(20);\n\tthis->pathTestButton->setY(10);\n\t//this->exitButton->setY(0);\n\n\tthis->addElement(this->playButton);\n\tthis->addElement(this->pathTestButton);\n\tthis->addElement(this->exitButton);\n}\nMainMenu::~MainMenu()\n{\n\tdelete this->playButton;\n\tdelete this->pathTestButton;\n\tdelete this->exitButton;\n}\nbool MainMenu::onControlEvent(Control control, int action)\n{\n\tif((action&CONTROL_MOUSEBUTTONACTION_PRESS) && control==CONTROL_GUI_SELECT)\n\t{\n\t\tif(this->selectedElement==this->playButton)\n\t\t{\n\t\t\tthis->manager->openRootScreen(new GameView());\n\t\t\treturn true;\n\t\t}\n\t\telse if(this->selectedElement==this->pathTestButton)\n\t\t{\n\t\t\tthis->manager->openRootScreen(new PathTest());\n\t\t\treturn true;\n\t\t}\n\t\telse if(this->selectedElement==this->exitButton)\n\t\t{\n\t\t\tthis->manager->close();\n\t\t\treturn true;\n\t\t}\n\t}\n\treturn false;\n}\n" }, { "alpha_fraction": 0.712909460067749, "alphanum_fraction": 0.7225433588027954, "avg_line_length": 24.317073822021484, "blob_id": "47a4c6c7ffbfab04f0fdf3e5f5acd151f7cf4720", "content_id": "05be1e4eebba78304f4bc44a4186fdad94e9ff97", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1038, "license_type": "no_license", "max_line_length": 93, "num_lines": 41, "path": "/src/cpp/gui/screens/PathTest.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"render/RenderManager.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include \"input/Controls.hpp\"\n#include \"gui/screens/MainMenu.hpp\"\n#include \"gui/ScreenManager.hpp\"\n#include \"ai/path/DebugPathHolder.hpp\"\n\n#include \"PathTest.hpp\"\n\nusing namespace ai::path;\n\nPathTest::PathTest() : Screen()\n{\n\tthis->pathHolder = new DebugPathHolder();\n\tthis->pathFinder = new PathFinder();\n\tthis->pathFinder->start(this->pathHolder->nodes.at(0), this->pathHolder->nodes.at(50*50-1));\n\tthis->lastTime = 0;\n}\nPathTest::~PathTest()\n{\n\tdelete this->pathHolder;\n\tdelete this->pathFinder;\n}\nvoid PathTest::render(util::DeltaTime &deltaTime, render::RenderManager &manager)\n{\n\tthis->pathHolder->render(deltaTime, manager);\n\t/*if(this->lastTime<deltaTime->getTime()-0.1)\n\t{\n\t\tthis->lastTime = 
deltaTime->getTime();\n\t}*/\n\tthis->pathFinder->tick(1);\n}\nbool PathTest::onControlEvent(int control, int action)\n{\n\tif((control&CONTROL_GUI_ESCAPE) && (action&CONTROL_KEYACTION_PRESS))\n\t{\n\t\tthis->manager->openRootScreen(new MainMenu());\n\t\treturn true;\n\t}\n\treturn false;\n}\n" }, { "alpha_fraction": 0.6552845239639282, "alphanum_fraction": 0.6981030106544495, "avg_line_length": 27.828125, "blob_id": "aaaf534a75600c219e09ef746cc8b7fb89de685e", "content_id": "02fdb6aa9f471c7cc1066b527953cd2efe30782a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1845, "license_type": "no_license", "max_line_length": 105, "num_lines": 64, "path": "/src/cpp/gui/elements/Button.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include \"render/BasicShapes.hpp\"\n#include \"render/shaders/ShaderUtils.hpp\"\n#include <glm/gtc/matrix_transform.hpp>\n#include \"render/RenderManager.hpp\"\n#include \"render/Font.hpp\"\n\n#include \"Button.hpp\"\n\n#define RGBf(r,g,b) r/255.f, g/255.f, b/255.f\n\nusing namespace render;\n\nrender::Font *Button::buttonFont = 0;\nButton::Button(std::string text) : Element()\n{\n\tsetSize(0, 0, 60, 10);\n\tthis->selectedBackgroundColor = glm::vec4(RGBf(140, 167, 246), 1.f);\n\tthis->backgroundColor = glm::vec4(RGBf(84, 106, 171), 1.f);\n\tthis->text = text;\n\tif(this->buttonFont==0)\n\t{\n\t\tthis->buttonFont = new render::Font(\"cour.ttf\", 8);\n\t}\n}\nButton::~Button()\n{\n\t\n}\nvoid Button::render(util::DeltaTime &deltaTime, render::RenderManager &rManager)\n{\n\tglm::mat4 translationMatrix = glm::translate(glm::mat4(1.0f), glm::vec3(this->getX(), this->getY(), 0));\n\tglm::mat4 boxMat;\n\n\t// Render the background box\n\tboxMat = translationMatrix*glm::scale(boxMat, glm::vec3(this->getWidth(), this->getHeight(), 0));\n\t\n\trManager.M = boxMat;\n\trManager.markMDirty();\n\tshaders::ShaderProgram *prog = rManager.useShader(SHADER_solidColor);\n\tGLint loc = prog->getShaderLocation(true, SHADER_solidColor_solidColor);\n\tGLint vploc = rManager.getVertexPosition();\n\n\tif(this->selected)\n\t\tglUniform4fv(loc, 1, &this->selectedBackgroundColor[0]);\n\telse\n\t\tglUniform4fv(loc, 1, &this->backgroundColor[0]);\n\t\n\tBasicShapes::renderUnitSquare(vploc);\n\n\t//// Render debug line\n\t//rManager.M = translationMatrix;\n\t//rManager.markMDirty();\n\n\t//glUniform4f(loc, 0.5f, 0.f, 0.f, 1.f);\n\t//BasicShapes::drawLine(glm::vec3(0,0,0),glm::vec3(10,10,0),vploc);\n\n\t// Render text\n\tboxMat = glm::mat4(1.0f);\n\tboxMat = glm::translate(boxMat, glm::vec3(this->getX()+1.0f, this->getY()+3.0f, 0));\n\trManager.M = boxMat;\n\trManager.markMDirty();\n\tthis->buttonFont->printf(this->text, rManager);\n}\n" }, { "alpha_fraction": 0.7230965495109558, "alphanum_fraction": 0.732879638671875, "avg_line_length": 24.83516502380371, "blob_id": "4c6db22ebb8395c36a073053a22dd5c34054463d", "content_id": "be8334fe31a34618b7f8a1e83192aeab68f2f0a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4702, "license_type": "no_license", "max_line_length": 164, "num_lines": 182, "path": "/src/cpp/render/RenderManager.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/Globals.hpp\"\n#include \"util/gl.h\"\n#include \"shaders/ShaderProgram.hpp\"\n#include \"render/BasicShapes.hpp\"\n\n#include \"RenderManager.hpp\"\n\nusing namespace render;\nusing namespace 
shaders;\n\nRenderManager::RenderManager() {\n\tthis->mDirty = false;\n\tthis->vDirty = false;\n\tthis->pDirty = false;\n\tthis->mvDirty = false;\n\tthis->vpDirty = false;\n\tthis->mvpDirty = false;\n\tthis->doCullFace = false;\n\tthis->doDepthBuffer = false;\n}\n\nRenderManager::~RenderManager() {\n}\n\nvoid RenderManager::setMVPMatrix(GLuint mvpMatrixShaderLocation) {\n\tif(this->mvpDirty)\n\t{\n\t\tMVP = P*V*M;\n\t\tthis->mvpDirty = false;\n\t}\n\tglUniformMatrix4fv(mvpMatrixShaderLocation, 1, GL_FALSE, &MVP[0][0]);\n}\n\nvoid RenderManager::markPDirty() {\n\tthis->pDirty = true;\n\tthis->vpDirty = true;\n\tthis->mvpDirty = true;\n}\n\nvoid RenderManager::markVDirty() {\n\tthis->vDirty = true;\n\tthis->mvDirty = true;\n\tthis->vpDirty = true;\n\tthis->mvpDirty = true;\n}\n\nvoid RenderManager::markMDirty() {\n\tthis->mDirty = true;\n\tthis->mvDirty = true;\n\tthis->mvpDirty = true;\n}\n\nvoid RenderManager::pushMatrixM()\n{\n\tthis->stackM = this->M;\n\tthis->stackMV = this->MV;\n\tthis->stackMVP = this->MVP;\n}\nvoid RenderManager::popMatrixM()\n{\n\t\n\tthis->M = this->stackM;\n\tthis->MV = this->stackMV;\n\tthis->MVP = this->stackMVP;\n}\n\n#define MATRIX_SHADER_INJECT(INTVAR, MATVAR) loc = shaderProgram.getShaderLocation(true, INTVAR); if(loc!=-1) glUniformMatrix4fv(loc, 1, GL_FALSE, &MATVAR[0][0])\n\n// Shaders\nvoid RenderManager::setShaderMatricies(ShaderProgram &shaderProgram)\n{\n\tif(this->mvpDirty)\n\t{\n\t\tMVP = P*V*M;\n\t\tthis->mvpDirty = false;\n\t}\n\tGLint loc;\n\tMATRIX_SHADER_INJECT(SHADERVAR_matrix_M, M);\n\tMATRIX_SHADER_INJECT(SHADERVAR_matrix_V, V);\n\tMATRIX_SHADER_INJECT(SHADERVAR_matrix_P, P);\n\tMATRIX_SHADER_INJECT(SHADERVAR_matrix_MV, MV);\n\tMATRIX_SHADER_INJECT(SHADERVAR_matrix_VP, VP);\n\tMATRIX_SHADER_INJECT(SHADERVAR_matrix_MVP, MVP);\n}\nshaders::ShaderProgram *RenderManager::useShader(int shader)\n{\n\tthis->shader = shaders::ShaderProgram::getShader(shader);\n\tif(this->shader==0)\n\t{\n\t\tutil::Globals::fatalError(\"Selected invalid shader\");\n\t}\n\tthis->shader->useShader();\n\tsetShaderMatricies(*this->shader);\n\treturn this->shader;\n}\nGLint RenderManager::getVertexPosition()\n{\n\treturn this->shader->getShaderLocation(false, SHADERVAR_vertex_position);\n}\nGLint RenderManager::getVertexNormal()\n{\n\treturn this->shader->getShaderLocation(false, SHADERVAR_vertex_normal);\n}\nGLint RenderManager::getVertexTexture()\n{\n\treturn this->shader->getShaderLocation(false, SHADERVAR_vertex_texture);\n}\n\nvoid RenderManager::enableDepth() {\n\tthis->doDepthBuffer = true;\n\tglDepthFunc(GL_LESS);\n\tglEnable(GL_DEPTH_TEST);\n}\nvoid RenderManager::disableDepth() {\n\tglDisable(GL_DEPTH_TEST);\n}\nvoid RenderManager::enableCullFace() {\n\tif(!this->doCullFace)\n\t{\n\t\tthis->doCullFace = true;\n\t\tglCullFace(GL_BACK);\n\t\tglFrontFace(GL_CCW);\n\t\tglEnable(GL_CULL_FACE);\n\t}\n}\nvoid RenderManager::disableCullFace() {\n\tif(this->doCullFace)\n\t{\n\t\tthis->doCullFace = false;\n\t\tglDisable(GL_CULL_FACE);\n\t}\n}\nvoid RenderManager::enableTransparency() {\n\tglEnable(GL_BLEND);\n\tglBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);\n}\nvoid RenderManager::disableTransparency() {\n\tglDisable(GL_BLEND);\n}\n\nvoid RenderManager::setDimensionsPx(int widthPx, int heightPx)\n{\n\tthis->widthPx = widthPx;\n\tthis->heightPx = heightPx;\n}\nvoid RenderManager::setDimensionsMM(float widthMM, float heightMM)\n{\n\tthis->widthMM = widthMM;\n\tthis->heightMM = heightMM;\n}\nint RenderManager::getWidthPx() {\n\treturn this->widthPx;\n}\nint 
RenderManager::getHeightPx() {\n\treturn this->heightPx;\n}\nfloat RenderManager::getWidthMM() {\n\treturn this->widthMM;\n}\nfloat RenderManager::getHeightMM() {\n\treturn this->heightMM;\n}\n\nvoid RenderManager::renderDirectionVector(const glm::vec3 position, const glm::vec3 direction, const glm::vec4 color)\n{\n\tthis->pushMatrixM();\n\tthis->M = glm::mat4(1.0f);\n\tthis->markMDirty();\n\trender::shaders::ShaderProgram *prog = this->useShader(SHADER_solidColor);\n\tglUniform4f(prog->getShaderLocation(true, SHADER_solidColor_solidColor), color.r, color.g, color.b, color.a);\n\tBasicShapes::drawLine(position, position+direction, getVertexPosition());\n\tthis->popMatrixM();\n}\nvoid RenderManager::renderDirectionVectors(const glm::vec3 position, const glm::vec3 directionForward, const glm::vec3 direction2, const glm::vec4 direction2_color)\n{\n\tthis->renderDirectionVector(position, directionForward, glm::vec4(0.f, 1.f, 0.f, 1.f));\n\tthis->renderDirectionVector(position+directionForward, direction2*0.2f, direction2_color);\n}\nvoid RenderManager::renderOrientation(const glm::vec3 position, const glm::quat q)\n{\n\tthis->renderDirectionVectors(position, q*glm::vec3(0,1,0), q*glm::vec3(0,0,1), glm::vec4(0.f, 0.f, 1.f, 1.f));\n}\n" }, { "alpha_fraction": 0.7132353186607361, "alphanum_fraction": 0.7147058844566345, "avg_line_length": 22.44827651977539, "blob_id": "eb879a317926cbbff1343c6f9ba0f64dfb03cd3c", "content_id": "a667a4008191299601a1f9c35dcafda39d73f280", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 680, "license_type": "no_license", "max_line_length": 65, "num_lines": 29, "path": "/src/cpp/world/entities/Enemy.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_WORLD_ENTITIES_ENEMY_HPP_\n#define FYP_WORLD_ENTITIES_ENEMY_HPP_\n\n#include \"world/Entity.hpp\"\n\n//#include \"ai/ObjectiveManager.hpp\"\nnamespace ai {\n\tclass ObjectiveManager;\n}\n#include \"ai/path/PathExecuter.hpp\"\n\nnamespace world {\n\tnamespace entities {\n\t\ttypedef Entity super;\n\t\tclass Enemy : public Entity {\n\t\t\tai::ObjectiveManager *objectiveManager;\n\t\t\tdouble animTime;\n\t\t\tpublic:\n\t\t\t\tEnemy(world::World *world);\n\t\t\t\tvirtual ~Enemy();\n\t\t\t\tvirtual void tick(util::DeltaTime &deltaTime);\n\t\t\t\tvirtual void render(render::RenderManager &rManager);\n\t\t\t\tvirtual void die(double time, glm::vec3 direction, int type);\n\t\t\t\tvoid setPath(std::vector<int> path);\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.740338146686554, "alphanum_fraction": 0.740338146686554, "avg_line_length": 19.700000762939453, "blob_id": "01a9928b2559a264b711de5ece8c0ea456dc1d29", "content_id": "e182db9c757ee534b313c4deb2b658f10a46ceec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 828, "license_type": "no_license", "max_line_length": 82, "num_lines": 40, "path": "/src/cpp/gui/screens/GameView.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_SCREENS_GAMEVIEW_HPP_\n#define FYP_GUI_SCREENS_GAMEVIEW_HPP_\n\n#include \"gui/Screen.hpp\"\n#include \"input/Controls.hpp\"\n#include <glm/detail/type_mat.hpp>\n#include \"render/Font.hpp\"\n\nnamespace world {\n\tclass World;\n}\nnamespace util {\n\tclass DeltaTime;\n}\nnamespace render {\n\tclass RenderManager;\n}\n\nnamespace screens {\n\n\tclass GameView: public Screen {\n\tprivate:\n\t\tworld::World *world;\n\t\trender::Font *statusFont;\n\t\tdouble 
pauseStartTime,pauseOffsetTime;\n\t\tbool wasSurface;\n\t\tint gameCompletionState;\n\tpublic:\n\t\tGameView();\n\t\tvirtual ~GameView();\n\t\tvoid onGameOver(int type);\n\t\tvirtual void render(util::DeltaTime &deltaTime, render::RenderManager &manager);\n\t\tbool onControlEvent(Control control, int action);\n\t\tvirtual void onScreenResize();\n\t\tvirtual bool supportsCursor();\n\t};\n\n} /* namespace screens */\n\n#endif\n" }, { "alpha_fraction": 0.6523662209510803, "alphanum_fraction": 0.6556833386421204, "avg_line_length": 39.738739013671875, "blob_id": "79bf20cf9914f9a39d04eb22f97eccbce236a7a3", "content_id": "f368338245844d02ab2e4476f137ff463da181b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 4522, "license_type": "no_license", "max_line_length": 225, "num_lines": 111, "path": "/Makefile", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "SRCDIR=src\nBINDIR=bin\nHOOKSDIR=hooks\nBININCDIR=$(BINDIR)/include\n\nall: assets.gz .SHADERS MuseumGuard\n\n### Assets\n\nRESBIN=assets.gz\nRESSRCDIR=$(SRCDIR)/res\nRESBINDIR=$(BINDIR)/object/res\nRESDEPDIR=$(BINDIR)/depend/res\nRESINCDIR=$(BININCDIR)/res\n\nASSETS_META_HOOK = $(HOOKSDIR)/asset_makemeta.py\nASSETS_META_FILE = $(BININCDIR)/AssetsMeta.h\n\nASSETS_HOOK_CONVERT = $(HOOKSDIR)/asset_convert.py\nASSETS_HOOK_CONVERT_IMAGE = $(HOOKSDIR)/asset_convert_image.py\nASSETS_HOOK_COMMON = $(HOOKSDIR)/asset_common.py\nASSETS_HOOK_PARSER = $(HOOKSDIR)/asset_parsers.py\nASSETS_CONVERT_DEPS = $(ASSETS_HOOK_CONVERT) $(ASSETS_HOOK_CONVERT_IMAGE) $(ASSETS_HOOK_COMMON) $(ASSETS_HOOK_PARSER)\nASSETS_CONVERT_FLAGS=--global-meta \"$(ASSETS_META_FILE)\" --verbose 0\n\n# MTL Files\nASSETS_MTL = $(shell find $(RESSRCDIR) -type f -name '*.mtl')\n# OBJ Files\nASSETS_OBJ = $(shell find $(RESSRCDIR) -type f -name '*.obj')\n# MD5 Meshes\nASSETS_MD5MESH = $(shell find $(RESSRCDIR) -type f -name '*.md5mesh')\n# MD5 Animations\nASSETS_MD5ANIM = $(shell find $(RESSRCDIR) -type f -name '*.md5anim')\n# Textures\nASSETS_TEXTURES = $(shell find $(RESSRCDIR) -type f -name '*.tga') $(shell find $(RESSRCDIR) -type f -name '*.png') $(shell find $(RESSRCDIR) -type f -name '*.jpg')\n\nRESSRCS = $(ASSETS_MTL) $(ASSETS_OBJ) $(ASSETS_MD5MESH) $(ASSETS_MD5ANIM) $(ASSETS_TEXTURES) \nRESINCS = $(patsubst $(RESSRCDIR)/%,$(RESBINDIR)/%.h,$(RESSRCS))\nRESBINS = $(patsubst $(RESSRCDIR)/%,$(RESBINDIR)/%.o,$(RESSRCS))\nRESDEPS = $(patsubst $(RESSRCDIR)/%,$(RESBINDIR)/%.d,$(RESSRCS))\n\n-include $(RESDEPS)\n\n$(RESBIN): $(ASSETS_META_FILE) $(RESBINS)\n\tcat $(RESBINS) > \"$@\"\n\n$(ASSETS_META_FILE): $(RESSRCS) $(ASSETS_META_HOOK)\n\t@mkdir -p \"$(@D)\"\n\t$(ASSETS_META_HOOK) --source $(RESSRCS) --meta \"$@\"\n\n$(RESBINDIR)/%.o $(RESINCDIR)/%.h: $(RESSRCDIR)/% $(ASSETS_CONVERT_DEPS)\n\t@mkdir -p \"$(@D)\"\n\t@mkdir -p \"$(patsubst $(RESBINDIR)%,$(RESINCDIR)%, $(@D))\"\n\t@mkdir -p \"$(patsubst $(RESBINDIR)%,$(RESDEPDIR)%, $(@D))\"\n\t$(ASSETS_HOOK_CONVERT) $(ASSETS_CONVERT_FLAGS) --meta \"$(patsubst $(RESSRCDIR)%,$(RESINCDIR)%, $(<)).h\" --depend \"$(patsubst $(RESSRCDIR)%,$(RESDEPDIR)%, $(<)).d\" --src \"$<\" --out \"$@\"\n\n### CPP Vars\n\nCPPSRCDIR=$(SRCDIR)/cpp\nCPPBINDIR=$(BINDIR)/object/cpp\nCPPDEPDIR=$(BINDIR)/depend/cpp\n\nCXX=g++\nCXXFLAGS=-g -fPIE -Wall -Wextra -Wno-unused-parameter -ansi -std=c++11 -I$(CPPSRCDIR) -I$(BININCDIR) -I/usr/include/freetype2 -include util/SuperDebug.h\nLDFLAGS=-std=c++11 -lglfw -lGL -lGLU -lGLEW -lfreetype -lpng -lboost_system -lboost_iostreams -lSOIL 
-pthread\nCPPBIN=MuseumGuard\nSUPERGLOBAL=$(CPPSRCDIR)/util/SuperDebug.h\n\nCPPSRCS = $(shell find $(CPPSRCDIR) -type f -name '*.cpp')\nCPPBINS = $(patsubst $(CPPSRCDIR)/%.cpp, $(CPPBINDIR)/%.o, $(CPPSRCS))\nCPPDEPS = $(patsubst $(CPPSRCDIR)/%.cpp, $(CPPDEPDIR)/%.d, $(CPPSRCS))\n\n-include $(CPPDEPS)\n\n$(CPPBIN): $(CPPBINS)\n\t$(CXX) $(LDFLAGS) -o $(CPPBIN) $(CPPBINS)\n\n# C++ Source => Objects\n$(CPPBINDIR)/%.o: $(CPPSRCDIR)/%.cpp $(SUPERGLOBAL)\n\t@mkdir -p \"$(@D)\"\n\t@mkdir -p \"$(patsubst $(CPPBINDIR)%,$(CPPDEPDIR)%, $(@D))\"\n\t$(CXX) $(CXXFLAGS) -c $< -o $@\n\t@$(CXX) $(CXXFLAGS) -DDEP -MM $< -MT $@ -MF $(patsubst $(CPPBINDIR)/%.o,$(CPPDEPDIR)/%.d, $(@))\n\t@cp -f $(patsubst $(CPPBINDIR)/%.o,$(CPPDEPDIR)/%.d, $(@)) $(patsubst $(CPPBINDIR)/%.o,$(CPPDEPDIR)/%.d, $(@)).tmp\n\t@sed -e 's/.*://' -e 's/\\\\$$//' < $(patsubst $(CPPBINDIR)/%.o,$(CPPDEPDIR)/%.d, $(@)).tmp | fmt -1 | sed -e 's/^ *//' -e 's/$$/:/' >> $(patsubst $(CPPBINDIR)/%.o,$(CPPDEPDIR)/%.d, $(@))\n\t@rm -f $(patsubst $(CPPBINDIR)/%.o,$(CPPDEPDIR)/%.d, $(@)).tmp\n\n### Shaders\n\nSHADER_HOOK = $(HOOKSDIR)/shaders_convert.py\nSHADER_VARIABLES_IN = $(CPPSRCDIR)/render/shaders/shader_variables.txt\nSHADER_FOLDER = $(CPPSRCDIR)/render/shaders/code/\nSHADER_DESC = $(CPPSRCDIR)/render/shaders/shaders.txt\n\nSHADER_HEADER = $(BININCDIR)/render/shaders/ShaderPrograms.h\nSHADER_VARIABLES_OUT = $(BININCDIR)/render/shaders/ShaderVariables.h\nSHADER_LOADER = $(BININCDIR)/render/shaders/ShaderLoader.c\n\nSHADER_PROGRAMS = $(shell find $(SHADER_FOLDER) -type f -name '*.c')\n\n.SHADERS: $(SHADER_VARIABLES_OUT) $(SHADER_HEADER) $(SHADER_LOADER)\n\n$(SHADER_VARIABLES_OUT) $(SHADER_HEADER) $(SHADER_LOADER): $(SHADER_VARIABLES_IN) $(SHADER_DESC) $(SHADER_PROGRAMS) $(SHADER_HOOK)\n\t@mkdir -p \"$(@D)\"\n\t$(SHADER_HOOK) --variables-in $(SHADER_VARIABLES_IN) --variables-out $(SHADER_VARIABLES_OUT) --shaders-folder $(SHADER_FOLDER) --shaders-desc $(SHADER_DESC) --shaders-header $(SHADER_HEADER) --shaders-loader $(SHADER_LOADER)\n\n### Main\n\nclean:\n\t@rm -vrf $(BINDIR)\n\t@rm -vf $(CPPBIN) $(RESBIN)\n" }, { "alpha_fraction": 0.7126948833465576, "alphanum_fraction": 0.7126948833465576, "avg_line_length": 17.70833396911621, "blob_id": "af3f8f4130535f9061eb8296d55e2e137add3d5d", "content_id": "f33515dad0e8698f71b01127adcc656de373fbbe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 449, "license_type": "no_license", "max_line_length": 76, "num_lines": 24, "path": "/src/cpp/ai/path/DebugPathHolder.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_PATH_DEBUGPATHHOLDER_HPP_\n#define FYP_AI_PATH_DEBUGPATHHOLDER_HPP_\n\n#include \"PathCommon.h\"\n\n#include <vector>\n\n#include \"util/DeltaTime.hpp\"\n#include \"render/RenderManager.hpp\"\n\nnamespace ai {\n\tnamespace path {\n\t\tclass DebugPathHolder {\n\t\t\tpublic:\n\t\t\t\tstd::vector<struct PathNode *> nodes;\n\t\t\t\tDebugPathHolder();\n\t\t\t\t~DebugPathHolder();\n\t\t\t\tvoid render(util::DeltaTime &deltaTime, render::RenderManager &manager);\n\t\t};\n\t}\n}\n\n\n#endif\n" }, { "alpha_fraction": 0.7445194125175476, "alphanum_fraction": 0.7445194125175476, "avg_line_length": 24.23404312133789, "blob_id": "ece837b6f9b75289b6da0b73101d8867f8f034bf", "content_id": "682999b56b7c6e750b704d96d5d223ea42ea62ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1186, "license_type": "no_license", "max_line_length": 90, "num_lines": 47, "path": 
"/src/cpp/gui/GUIHolder.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_GUIHOLDER_HPP_\n#define FYP_GUI_GUIHOLDER_HPP_\n\nclass GUIHolder;\nclass Font;\nstruct CursorPos;\nstruct DisplayMetrics;\nclass GLFWwindow;\n\n#include \"ui/GUI.hpp\"\n\nstruct CursorPos {\n\tdouble x,y,dx,dy;\n};\nstruct DisplayMetrics {\n\tdouble widthMM,heightMM,aspectRatio;\n\tint widthPx,heightPx;\n};\n\nclass GUIHolder {\n\tpublic:\n\t\tGUIHolder();\n\t\t~GUIHolder();\n\t\tvoid runGame();\n\t\tvoid openRootGUI(GUI *gui);\n\t\tvoid openGUI(GUI *gui);\n\t\tvoid closeGUI(GUI *gui);\n\t\tvoid closeSurfaceGUI();\n\t\tvoid openMainMenu();//closeRootGUI();\n\t\tbool isGUISurface(GUI *gui);\n\t\tstatic GUIHolder *eventHandler;\n\t\tstatic void onError(int error, const char *msg);\n\t\tstatic void onKeyEvent(GLFWwindow *window, int key, int scancode, int action, int mode);\n\t\tstatic void onCursorPosEvent(GLFWwindow *window, double x, double y);\n\t\tstatic void onMouseButtonEvent(GLFWwindow* window, int button, int action, int mods);\n\t\tstatic void onScrollEvent(GLFWwindow* window, double dx, double dy);\n\t\tvoid setMouseEnabled(bool enabled);\n\t\tstruct CursorPos cursor;\n\t\tstruct DisplayMetrics metrics;\n\t\tstruct DisplayMetrics lastMetrics;\n\t\tGLFWwindow *window;\n\tprivate:\n\t\tGUI *firstGui,*lastGui;\n\t\tFont *font;\n};\n\n#endif\n" }, { "alpha_fraction": 0.699999988079071, "alphanum_fraction": 0.699999988079071, "avg_line_length": 9, "blob_id": "a51dcbc1bdc3190b7272884bc2dca77c5bc15864", "content_id": "8c0a171e7316f63545d64bfc363ebf4a28062d97", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 100, "license_type": "no_license", "max_line_length": 29, "num_lines": 10, "path": "/src/cpp/gui/EventHandler.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"EventHandler.hpp\"\n\nEventHandler::EventHandler()\n{\n\t\n}\nEventHandler::~EventHandler()\n{\n\t\n}\n" }, { "alpha_fraction": 0.7142857313156128, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 9.5, "blob_id": "055e10606f42e9059db4c54dbce0b4f5d966fcdf", "content_id": "484186a1fbd7d7d2bf8881de2b138cab92574e7f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 105, "license_type": "no_license", "max_line_length": 31, "num_lines": 10, "path": "/src/cpp/gui/screens/ConfirmAction.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"ConfirmAction.hpp\"\n\nConfirmAction::ConfirmAction()\n{\n\t\n}\nConfirmAction::~ConfirmAction()\n{\n\t\n}\n" }, { "alpha_fraction": 0.7009395360946655, "alphanum_fraction": 0.71210777759552, "avg_line_length": 30.33888816833496, "blob_id": "232192ebf3513ac050d502b29e28b6f3429b693f", "content_id": "877d23c8995922a8e9a738f12765cfc7e2383c38", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5641, "license_type": "no_license", "max_line_length": 234, "num_lines": 180, "path": "/src/cpp/render/SkeletalAnimation.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/StreamUtils.hpp\"\n#include \"util/QuaternionUtils.hpp\"\n\n//d\n#include <iostream>\n#include <glm/gtc/matrix_transform.hpp>\n#include \"render/BasicShapes.hpp\"\n#include \"util/gl.h\"\n#include \"render/shaders/ShaderUtils.hpp\"\n\n#include \"SkeletalAnimation.hpp\"\n\nusing namespace util;\nusing namespace util::StreamUtils;\nusing namespace 
util::Boundaries;\nusing namespace render;\n\nSkeletalAnimation::SkeletalAnimation(int assetId, std::istream &fp) : Asset(assetId)\n{\n\tthis->setName(readString(fp));\n\tnumFrames = readInt(fp);\n\tnumJoints = readInt(fp);\n\tframeRate = readInt(fp);\n\tnumAnimatedComponents = readInt(fp);\n\t//hierarchy\n\tfor(int i=0;i<numJoints;i++)\n\t{\n\t\tSkeletalAnimationJoint joint;\n\t\tjoint.name = readString(fp);\n\t\tjoint.parent = readInt(fp);\n\t\tjoint.flags = readInt(fp);\n\t\tjoint.startIndex = readInt(fp);\n\t\tthis->hierarchy.push_back(joint);\n\t}\n\t// bounds\n\tfor(int i=0;i<numFrames;i++)\n\t{\n\t\tfloat a0 = readFloat(fp);\n\t\tfloat a1 = readFloat(fp);\n\t\tfloat a2 = readFloat(fp);\n\t\tfloat a3 = readFloat(fp);\n\t\tfloat a4 = readFloat(fp);\n\t\tfloat a5 = readFloat(fp);\n\t\tthis->bounds.push_back(new AABB(glm::vec3(a0, a1, a2), glm::vec3(a3, a4, a5)));\n\t}\n\t// base frame\n\tfor(int i=0;i<numJoints;i++)\n\t{\n\t\tMD5Bone bone;\n\t\tbone.pos.x = readFloat(fp);\n\t\tbone.pos.y = readFloat(fp);\n\t\tbone.pos.z = readFloat(fp);\n\t\tbone.ori.x = readFloat(fp);\n\t\tbone.ori.y = readFloat(fp);\n\t\tbone.ori.z = readFloat(fp);\n\t\tQuaternionUtils::calculateQuaternionW(bone.ori);\n\t\tbaseFrame.push_back(bone);\n\t}\n\t// frames\n\tframeData = new float[numFrames*numAnimatedComponents];\n\tfp.read((char *)frameData, numFrames*numAnimatedComponents*sizeof(float));\n\t// frames>collate\n\tint j = 0;\n\tfor(int f=0;f<numFrames;f++)\n\t{\n\t\tSkeleton skeleton;\n\t\tfor(int i=0;i<numJoints;i++)\n\t\t{\n\t\t\tSkeletalAnimationJoint &joint = this->hierarchy[i];\n\t\t\tMD5Bone bone = this->baseFrame[i];\n\t\t\tj = 0;\n\t\t\tfloat *currentFrameData = &frameData[f*numAnimatedComponents];\n\t\t\tif(joint.flags& 1) bone.pos.x = currentFrameData[joint.startIndex + (j++)];\n\t\t\tif(joint.flags& 2) bone.pos.y = currentFrameData[joint.startIndex + (j++)];\n\t\t\tif(joint.flags& 4) bone.pos.z = currentFrameData[joint.startIndex + (j++)];\n\t\t\tif(joint.flags& 8) bone.ori.x = currentFrameData[joint.startIndex + (j++)];\n\t\t\tif(joint.flags&16) bone.ori.y = currentFrameData[joint.startIndex + (j++)];\n\t\t\tif(joint.flags&32) bone.ori.z = currentFrameData[joint.startIndex + (j++)];\n\t\t\tif(joint.flags&(8|16|32))\n\t\t\t\tQuaternionUtils::calculateQuaternionW(bone.ori);\n\t\t\tif(joint.parent>=0)\n\t\t\t{\n\t\t\t\tMD5Bone &parentBone = skeleton[joint.parent];\n\t\t\t\tbone.pos = parentBone.pos + parentBone.ori*bone.pos;\n\t\t\t\tbone.ori = parentBone.ori*bone.ori;\n\t\t\t\tbone.ori = glm::normalize(bone.ori);\n\t\t\t}\n\t\t\tskeleton.push_back(bone);\n\t\t}\n\t\tthis->frames.push_back(skeleton);\n\t}\n\tdelete [] frameData;\n\tframeData = 0;\n\t//[numFrames, numJoints, frameRate, numAnimatedComponents, hierarchy, bounds, baseframe, frames]\n}\nSkeletalAnimation::~SkeletalAnimation()\n{\n\t\n}\nvoid SkeletalAnimation::write(std::ostream &ost) const\n{\n\tost << \"[\" << this->getAssetID() << \":\" << this->getName() << \".md5anim] \" << this->numFrames << \" frames (\" << this->frameRate << \" fps), \" << this->numJoints << \" joints, \" << this->numAnimatedComponents << \" animation components\";\n}\nvoid SkeletalAnimation::postload()\n{\n\t\n}\n/*\n#include \"render/BasicShapes.hpp\"\nbool renderSkel = false;\nbool renderWeights = false;\n#include \"gui/WindowScreenManager.hpp\"\nvoid SkeletalAnimation::render(render::RenderManager &manager, SkeletalModel &model, float time)\n{\n\t// scale up\n\tglm::mat4 tempV = manager.V;\n\t//manager.V = glm::scale(manager.V, glm::vec3(10.f, 10.f, 
10.f));\n\t//manager.markVDirty();\n\t// time scale up\n\tfloat totalAnimationTime = frameRate*time;\n\tint frame = (int)std::fmod(totalAnimationTime,numFrames);\n\tSkeleton &skeleton = this->frames[frame];\n\tmodel.render(manager, skeleton);\n\tif(renderSkel)\n\t\tmodel.renderSkeleton(manager, skeleton);\n\tif(renderWeights)\n\t\tmodel.renderWeights(manager, skeleton);\n\n\tif(glfwGetKey(WindowScreenManager::eventHandler->window, GLFW_KEY_F5))\n\t\trenderSkel = !renderSkel;\n\tif(glfwGetKey(WindowScreenManager::eventHandler->window, GLFW_KEY_F6))\n\t\trenderWeights = !renderWeights;\n\n\t/ *\n\t* /\n\n\tmanager.V = tempV;\n\tmanager.markVDirty();\n}*/\nvoid SkeletalAnimation::renderBounds(RenderManager &rManager, double time)\n{\n\tAABB *aabb = this->bounds[(int)std::fmod((float)this->frameRate*time,numFrames)];\n\trManager.M = glm::translate(rManager.M, glm::vec3(aabb->minX(), aabb->minY(), aabb->minZ()));\n\trManager.M = glm::scale(rManager.M, glm::vec3(aabb->boxHalfSize[0], aabb->boxHalfSize[1], aabb->boxHalfSize[2]));\n\trManager.markMDirty();\n\trender::shaders::ShaderProgram *sp = rManager.useShader(SHADER_solidColor);\n\tglUniform4f(sp->getShaderLocation(false, SHADER_solidColor_solidColor), 0.f, 1.f, 1.f, 0.3f);\n\trManager.enableTransparency();\n\tBasicShapes::renderUnitCube(sp->getShaderLocation(false, SHADERVAR_vertex_position));\n\trManager.disableTransparency();\n}\n\ndouble SkeletalAnimation::getAnimationDuration()\n{\n\treturn (float)this->numFrames/this->frameRate;\n}\nint SkeletalAnimation::getFrame(double time)\n{\n\treturn (int)std::fmod((float)this->frameRate*time,numFrames);\n}\nSkeleton SkeletalAnimation::getFrameSkeleton(int frame)\n{\n\tif(frame<0)\n\t\tframe += this->frames.size();\n\treturn this->frames[frame];\n}\nAABB &SkeletalAnimation::getFrameBounds(int frame)\n{\n\tif(frame<0)\n\t\tframe += this->frames.size();\n\treturn *this->bounds[frame];\n}\nSkeleton SkeletalAnimation::getInterpolatedSkeleton(int firstFrame)\n{\n\treturn this->baseFrame;//TODO\n}\nSkeleton SkeletalAnimation::getSkeleton(double time)\n{\n\treturn this->getFrameSkeleton(this->getFrame(time));\n}\n" }, { "alpha_fraction": 0.6984333992004395, "alphanum_fraction": 0.7127937078475952, "avg_line_length": 27.024391174316406, "blob_id": "23a530160b69b2775dcd8e83303e062deea0a4c1", "content_id": "902ad852d2357d0953ca4f37bc56d49390af7b6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2298, "license_type": "no_license", "max_line_length": 103, "num_lines": 82, "path": "/src/cpp/gui/screens/GamePauseMenu.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"gui/elements/Button.hpp\"\n#include \"input/Controls.hpp\"\n#include \"gui/ScreenManager.hpp\"\n#include \"gui/screens/PathTest.hpp\"\n#include \"gui/screens/GameView.hpp\"\n\n#include \"render/BasicShapes.hpp\"\n#include \"render/shaders/ShaderUtils.hpp\"\n#include \"render/RenderManager.hpp\"\n#include \"util/gl.h\"\n#include <glm/gtc/matrix_transform.hpp>\n#include <glm/matrix.hpp>\n\n#include <iostream>\n\n#include \"GamePauseMenu.hpp\"\n\nusing namespace screens;\n\nGamePauseMenu::GamePauseMenu() : Screen()\n{\n\tthis->resumeButton = new Button(\"Resume\");\n\tthis->pathTestButton = new Button(\"???\");\n\tthis->exitButton = new 
Button(\"Quit\");\n\n\tthis->resumeButton->setY(20);\n\tthis->pathTestButton->setY(10);\n\t//this->exitButton->setY(0);\n\n\tthis->addElement(this->resumeButton);\n\tthis->addElement(this->pathTestButton);\n\tthis->addElement(this->exitButton);\n\n\tthis->font = new render::Font(\"cour.ttf\", 22);\n}\nGamePauseMenu::~GamePauseMenu()\n{\n\tdelete this->resumeButton;\n\tdelete this->pathTestButton;\n\tdelete this->exitButton;\n}\nvoid GamePauseMenu::render(util::DeltaTime &deltaTime, render::RenderManager &rManager)\n{\n\tsuper::render(deltaTime, rManager);\n\n\tglm::mat4 boxMat = glm::mat4(1.0f);\n\tdouble s = (1+std::sin(deltaTime.getTime()*4))/20;\n\tboxMat = glm::translate(boxMat, glm::vec3(rManager.getWidthMM()/2, rManager.getHeightMM()/2, 0));\n\tboxMat = glm::scale(boxMat, glm::vec3(1+s, 1+s, 0));\n\t//boxMat = glm::translate(boxMat, glm::vec3(this->font->getTextWidth(\"Paused\", rManager)/4, 22/2, 0));\n\tboxMat = glm::translate(boxMat, glm::vec3(-this->font->getTextWidth(\"Paused\", rManager)/4, 0, 0));\n\trManager.M = boxMat;\n\trManager.markMDirty();\n\tthis->font->printf(\"Paused\", rManager);\n}\nbool GamePauseMenu::onControlEvent(Control control, int action)\n{\n\tif(action==CONTROL_KEYACTION_RELEASE && control==CONTROL_GUI_ESCAPE)\n\t{\n\t\tthis->manager->closeScreen(this);\n\t\treturn true;\n\t}\n\tif((action&CONTROL_MOUSEBUTTONACTION_PRESS) && control==CONTROL_GUI_SELECT)\n\t{\n\t\tif(this->selectedElement==this->resumeButton)\n\t\t{\n\t\t\tthis->manager->closeScreen(this);\n\t\t\treturn true;\n\t\t}\n\t\telse if(this->selectedElement==this->pathTestButton)\n\t\t{\n\t\t\tthis->manager->openRootScreen(new PathTest());\n\t\t\treturn true;\n\t\t}\n\t\telse if(this->selectedElement==this->exitButton)\n\t\t{\n\t\t\tthis->manager->close();\n\t\t\treturn true;\n\t\t}\n\t}\n\treturn true;\n}\n" }, { "alpha_fraction": 0.7449856996536255, "alphanum_fraction": 0.7449856996536255, "avg_line_length": 19.52941131591797, "blob_id": "7cc708cadbf2e39d7d9a6fbeb156d747ca6123be", "content_id": "0a809ef525c3b1cd809866a666848a785bac4070", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 349, "license_type": "no_license", "max_line_length": 59, "num_lines": 17, "path": "/src/cpp/gui/screens/MainMenu.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_SCREENS_MAINMENU_HPP_\n#define FYP_GUI_SCREENS_MAINMENU_HPP_\n\nclass Button;\n\n#include \"gui/Screen.hpp\"\n#include \"input/Controls.hpp\"\n\nclass MainMenu : public Screen {\n\tpublic:\n\t\tMainMenu();\n\t\tvirtual ~MainMenu();\n\t\tButton *exitButton,*playButton,*pathTestButton;\n\t\tvirtual bool onControlEvent(Control control, int action);\n};\n\n#endif\n" }, { "alpha_fraction": 0.6859875917434692, "alphanum_fraction": 0.6904895901679993, "avg_line_length": 20.53939437866211, "blob_id": "fbd11af972b8c75b9df6b24d55ba4330501c58c2", "content_id": "cdcce96f2600e50f0730074d82a138e76a2a6e78", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3554, "license_type": "no_license", "max_line_length": 79, "num_lines": 165, "path": "/src/cpp/util/AssetManager.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <fstream>\n#include <boost/iostreams/filtering_streambuf.hpp>\n#include <boost/iostreams/filter/gzip.hpp>\n#include \"util/StreamUtils.hpp\"\n#include \"util/Globals.hpp\"\n#include \"render/MaterialLibrary.hpp\"\n#include 
\"render/StaticModel.hpp\"\n#include \"render/SkeletalModel.hpp\"\n#include \"render/SkeletalAnimation.hpp\"\n#include \"render/DDSImage.hpp\"\n#include \"ai/path/NavigationGraph.hpp\"\n\n#include \"AssetManager.hpp\"\n\nusing namespace util;\nusing namespace util::StreamUtils;\n\nvoid assetManagerThreadRun()\n{\n\tAssetManager::getAssetManager()->run();\n}\n\nAssetManager *AssetManager::instance = 0;\nAssetManager *AssetManager::getAssetManager()\n{\n\tif(AssetManager::instance==0)\n\t{\n\t\tAssetManager::instance = new AssetManager();\n\t}\n\treturn AssetManager::instance;\n}\nAssetManager::AssetManager()\n{\n\tinstance = 0;\n\tprogress_current = 0;\n\tprogress_total = 0;\n\tmemset(assets, 0, sizeof(assets));\n\tassetManagerThread = 0;\n\tpreload_complete = 0;\n\tpostload_complete = 0;\n}\nAssetManager::~AssetManager()\n{\n\tfor(auto a : this->assets)\n\t\tdelete a;\n}\nvoid AssetManager::init()\n{\n\tthis->assetManagerThread = new std::thread(assetManagerThreadRun);\n}\nvoid AssetManager::cleanup()\n{\n\tthis->assetManagerThread->join();\n\tdelete this->assetManagerThread;\n\tdelete AssetManager::instance;\n}\nvoid AssetManager::run()\n{\n\t//Open file with boost libs\n\tstd::ifstream gzfile(\"assets.gz\", std::ios_base::in | std::ios_base::binary);\n\tif(!gzfile.is_open()) {\n\t\tstd::cerr << \"ERROR: Failed to open asset file\" << std::endl;\n\t\treturn;\n\t}\n\t\n\tboost::iostreams::filtering_streambuf<boost::iostreams::input> inbuf;\n\tinbuf.push(boost::iostreams::gzip_decompressor());\n\tinbuf.push(gzfile);\n\tstd::istream fp(&inbuf);\n\t\n\tAsset *asset;\n\tint assetType = 0, assetId = 0;\n\twhile(!fp.eof())\n\t{\n\t\tfp.read((char *)&assetType, 1);\n\t\tif(fp.eof())\n\t\t\tbreak;\n\t\t//std::cout << \"READ TYPE: \" << assetType << std::endl;\n\t\tswitch(assetType)\n\t\t{\n\t\t\tcase ASSET_MTLLIB:\n\t\t\t\tasset = new render::MaterialLibrary(assetId, fp);\n\t\t\t\tbreak;\n\t\t\tcase ASSET_WAVEFRONT:\n\t\t\t\tasset = new render::StaticModel(assetId, fp);\n\t\t\t\tbreak;\n\t\t\tcase ASSET_MD5MESH:\n\t\t\t\tasset = new render::SkeletalModel(assetId, fp);\n\t\t\t\tbreak;\n\t\t\tcase ASSET_MD5ANIM:\n\t\t\t\tasset = new render::SkeletalAnimation(assetId, fp);\n\t\t\t\tbreak;\n\t\t\tcase ASSET_DDS:\n\t\t\t\tasset = new render::DDSImage(assetId, fp);\n\t\t\t\tbreak;\n\t\t\tcase ASSET_NAVMESH:\n\t\t\t\tasset = new ai::path::NavigationGraph(assetId, fp);\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tutil::Globals::fatalError(\"Unknown asset type \"+std::to_string(assetType));\n\t\t\t\treturn;\n\t\t}\n\t\tthis->assets[assetId] = asset;\n\t\tassetId++;\n\t}\n\tpreload_complete = true;\n}\nbool AssetManager::postload()\n{\n\tif(postload_complete)\n\t\treturn true;\n\tif(!preload_complete)\n\t\treturn false;\n\tfor(Asset *a : this->assets)\n\t{\n\t\ta->postload();\n\t}\n\tpostload_complete = true;\n\treturn true;\n}\nfloat AssetManager::getProgress()\n{\n\tprogress_mutex.lock();\n\tif(progress_total<=0)\n\t{\n\t\treturn 0.f;\n\t}\n\tfloat f = (float)progress_current/(float)progress_total;\n\tprogress_mutex.unlock();\n\treturn f;\n}\nAsset *AssetManager::getAsset(int assetId)\n{\n\treturn this->assets[assetId];\n}\nAsset::Asset(int assetId)\n{\n\tthis->_assetId = assetId;\n}\nAsset::~Asset()\n{\n\t\n}\nint Asset::getAssetID() const\n{\n\treturn this->_assetId;\n}\nstd::string Asset::getName() const\n{\n\treturn this->name;\n}\nvoid Asset::setName(std::string name)\n{\n\tthis->name = name;\n}\nvoid Asset::write(std::ostream &ost) const\n{\n\tost << \"[\" << getAssetID() << \":\" << getName() << 
\"]\";\n}\nstd::ostream &operator<<(std::ostream &ost, const Asset &asset)\n{\n\tasset.write(ost);\n\treturn ost;\n}\n" }, { "alpha_fraction": 0.6938775777816772, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 11.25, "blob_id": "db7d67dd1d792a1d37bbc15a560300a872101366", "content_id": "eb440f2a5c2cb1b232bc5252ceb92afc13a96c20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 98, "license_type": "no_license", "max_line_length": 31, "num_lines": 8, "path": "/src/cpp/render/shaders/code/solidColor_fragment.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//fragment\n\nimport uniform vec4 solidColor;\nout vec4 color;\n\nvoid main() {\n\tcolor = solidColor;\n}\n" }, { "alpha_fraction": 0.7453973889350891, "alphanum_fraction": 0.7516838908195496, "avg_line_length": 27.18987274169922, "blob_id": "19dbe95c819980ff56074dd9cad07f1be949c573", "content_id": "799af1606472a5ca84039b37e034fa0fc6d5d8da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2227, "license_type": "no_license", "max_line_length": 102, "num_lines": 79, "path": "/src/cpp/world/World.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_WORLD_WORLD_HPP_\n#define FYP_WORLD_WORLD_HPP_\n\n#include <list>\n#include \"render/StaticModel.hpp\"\n\n#include \"world/entities/Enemy.hpp\"\n#include \"world/entities/Player.hpp\"\n#include \"world/entities/Turret.hpp\"\n#include \"world/entities/SecurityCamera.hpp\"\n\n#include \"render/RenderManager.hpp\"\n\n#include \"util/DeltaTime.hpp\"\n\n#include \"input/Controls.hpp\"\n#include \"input/ControlScheme.hpp\"\n\n#include \"ai/path/NavigationGraph.hpp\"\n\n#include \"gui/screens/GameView.hpp\"\n\n#define SELECTOR_OFF 0\n#define SELECTOR_SECURITY_CAMERA 1\n#define SELECTOR_TURRET 2\n#define SELECTOR_REMOVE 3\n\n#define GAME_OVER_ENEMY_DESTROYED 0\n#define GAME_OVER_PLAYER_DESTROYED 1\n#define GAME_OVER_ENEMY_ESCAPED_W_ARTEFACT 2\n#define GAME_OVER_ENEMY_ESCAPES_WO_ARTEFACT 3\n\nnamespace world {\n\tclass World {\n\tprivate:\n\t\t// World models\n\t\trender::StaticModel *world_interactive_model;\n\t\trender::StaticModel *world_aesthetic_model;\n\t\trender::StaticModel *world_skybox;\n\t\tcollisions::StaticMesh *world_interactive_collision;\n\t\t\n\t\t// Selector\n\t\tint selector;\n\t\tbool enableSelector;\n\t\tworld::entities::SecurityCamera *selectorCamera;\n\t\tworld::entities::Turret *selectorTurret;\n\t\tworld::Entity *selectorRemove;\n\t\t\n\t\tstd::list<world::Entity *> entities;\n\t\tstd::list<world::Entity *> spawningEntities;\n\t\t\n\t\tworld::entities::Player *player;\n\t\tworld::entities::Enemy *enemy;\n\t\t\n\t\tcontrols::ControlScheme *controlScheme;\n\t\t\n\t\tdouble vertAngle, horizAngle, lastX, lastY;\n\t\tfloat selectorYaw;\n\t\tglm::vec3 viewDirection, viewUp;\n\t\tscreens::GameView *gameView;\n\tpublic:\n\t\tai::path::NavigationGraph *world_navigation_graph;\n\t\tWorld(screens::GameView *gameView);\n\t\t~World();\n\t\tvoid add(Entity *ent, glm::vec3 location);\n\t\tvoid tick(util::DeltaTime &deltaTime, bool surface);\n\t\tvoid render3D(render::RenderManager &manager, bool isSurface);\n\t\tvoid render2D(render::RenderManager &manager, bool isSurface);\n\t\tstd::list<world::Entity *> *getEntities();\n\t\tutil::Boundaries::RaycastResult rayCast(util::Boundaries::Raycast &raycast, world::Entity **entity);\n\t\tvoid onDebugControl(Control control, int action);\n\t\tvoid onRayHit(glm::vec3 rayOrigin, 
float distance, glm::vec3 normal);\n\t\tvoid gameOver(int type);\n\t\tint remainingTurrets;\n\t\tint remainingCameras;\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.7603305578231812, "alphanum_fraction": 0.7603305578231812, "avg_line_length": 21, "blob_id": "8a9c035e2f3c49724b4549a01f8421dce5f9f7d4", "content_id": "92328ca61f84ee726dd74ae4b80ecc41e953db11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 484, "license_type": "no_license", "max_line_length": 76, "num_lines": 22, "path": "/src/cpp/input/controls/DebugControls.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_INPUT_CONTROLS_DEBUGCONTROLS_HPP_\n#define FYP_INPUT_CONTROLS_DEBUGCONTROLS_HPP_\n\nnamespace world {\n\tclass Entity;\n}\n\n#include \"input/ControlScheme.hpp\"\n\nnamespace controls {\n\tclass DebugControls : public ControlScheme {\n\t\tprivate:\n\t\t\tdouble lastCursorX,lastCursorY,cursorX,cursorY,cursorDeltaX,cursorDeltaY;\n\t\t\tbool lastCursor;\n\t\tpublic:\n\t\t\tDebugControls(world::Entity *entity);\n\t\t\tvirtual ~DebugControls();\n\t\t\tvirtual void tick(util::DeltaTime &deltaTime);\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.6651870608329773, "alphanum_fraction": 0.6886493563652039, "avg_line_length": 30.22772216796875, "blob_id": "4864a7541930726cdea3de347b2f6f15a19ec0bc", "content_id": "c80437f02729398b308e960a0cd3c17842de1e5d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3154, "license_type": "no_license", "max_line_length": 150, "num_lines": 101, "path": "/src/cpp/input/controls/DebugControls.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/DeltaTime.hpp\"\n#include \"util/gl.h\"\n#include <glm/gtc/quaternion.hpp>\n#include \"world/Entity.hpp\"\n#include <iostream>\n\n#include \"DebugControls.hpp\"\n\nusing namespace world;\nusing namespace controls;\n\nDebugControls::DebugControls(world::Entity *controlEntity) : ControlScheme(controlEntity)\n{\n\tthis->lastCursorX = -1000;\n\tthis->lastCursorY = -1000;\n\tthis->cursorX = 0;\n\tthis->cursorY = 0;\n\tthis->cursorDeltaX = 0;\n\tthis->cursorDeltaY = 0;\n\tthis->lastCursor = false;\n}\nDebugControls::~DebugControls()\n{\n\t\n}\nvoid DebugControls::tick(util::DeltaTime &deltaTime)\n{\n\tfloat r;\n\tglm::vec3 movement = glm::vec3(0,0,0);\n\tglm::quat orientation = this->controlEntity->getOrientation();\n\n\t// Mouse controls\n\tGLFWwindow *window = glfwGetCurrentContext();\n\tglfwGetCursorPos(window, &this->cursorX, &this->cursorY);\n\tif(!this->lastCursor)\n\t{\n\t\tthis->lastCursor = true;\n\t}\n\telse\n\t{\n\t\tr = deltaTime.getTimeDelta()*2.f;\n\t\tthis->cursorDeltaX = (this->cursorX-this->lastCursorX)*r;\n\t\tthis->cursorDeltaY = (this->cursorY-this->lastCursorY)*r;\n\t\tif(this->cursorDeltaX!=0 || this->cursorDeltaY!=0)\n\t\t{\n\t\t\tglm::quat q = glm::quat(glm::vec3(0, 0, glm::radians(-this->cursorDeltaX)));\n\t\t\tglm::quat preq = glm::quat(glm::vec3(glm::radians(-this->cursorDeltaY), 0, 0));\n\t\t\torientation = q*orientation*preq;\n\t\t}\n\t}\n\tthis->lastCursorX = this->cursorX;\n\tthis->lastCursorY = this->cursorY;\n\n\t// Orientation changes\n\tr = 2*deltaTime.getTimeDelta();\n\n\t// Relative Roll\n\tif(glfwGetKey(window, GLFW_KEY_1)==GLFW_PRESS)\n\t\torientation = glm::angleAxis(r, glm::vec3( 0, 0,-1))*orientation;\n\tif(glfwGetKey(window, GLFW_KEY_3)==GLFW_PRESS)\n\t\torientation = glm::angleAxis(r, glm::vec3( 0, 0, 1))*orientation;\n\n\t// Relative 
Pitch\n\tif(glfwGetKey(window, GLFW_KEY_R)==GLFW_PRESS)\n\t\torientation = glm::angleAxis(r, glm::vec3(-1, 0, 0))*orientation;\n\tif(glfwGetKey(window, GLFW_KEY_V)==GLFW_PRESS)\n\t\torientation = glm::angleAxis(r, glm::vec3( 1, 0, 0))*orientation;\n\n\t// Absolute Yaw\n\tif(glfwGetKey(window, GLFW_KEY_Z)==GLFW_PRESS)\n\t\torientation *= glm::angleAxis(r, glm::vec3( 0, 1, 0));\n\tif(glfwGetKey(window, GLFW_KEY_C)==GLFW_PRESS)\n\t\torientation *= glm::angleAxis(r, glm::vec3( 0,-1, 0));\n\n\t// Movement Controls\n\tr = 10.f*deltaTime.getTimeDelta();\n\n\t// Forward/Backward\n\tif(glfwGetKey(window, GLFW_KEY_W)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( 0, r, 0);\n\tif(glfwGetKey(window, GLFW_KEY_S)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( 0, -r, 0);\n\n\t// Left/Right\n\tif(glfwGetKey(window, GLFW_KEY_A)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3(-r, 0, 0);\n\tif(glfwGetKey(window, GLFW_KEY_D)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( r, 0, 0);\n\n\t// Down/Up\n\tif(glfwGetKey(window, GLFW_KEY_Q)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( 0, 0,-r);\n\tif(glfwGetKey(window, GLFW_KEY_E)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( 0, 0, r);\n\n\t//glm::vec3 orientationEuler = glm::eulerAngles(orientation);\n\t//std::cout << glm::degrees(orientationEuler.x) << \", \" << glm::degrees(orientationEuler.y) << \", \" << glm::degrees(orientationEuler.z) << std::endl;\n\n\tthis->controlEntity->translate(movement);\n\tthis->controlEntity->setOrientation(orientation);\n}\n" }, { "alpha_fraction": 0.7571428418159485, "alphanum_fraction": 0.7571428418159485, "avg_line_length": 23.5, "blob_id": "2bdd3c1c14879288b75d939ebc691de0edfa5a7b", "content_id": "2b9c94348a3206e376e3477ea8eead86dbbfba5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 490, "license_type": "no_license", "max_line_length": 83, "num_lines": 20, "path": "/src/cpp/gui/screens/GamePauseMenu.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_SCREENS_GAMEPAUSEMENU_HPP_\n#define FYP_GUI_SCREENS_GAMEPAUSEMENU_HPP_\n\nclass Button;\n\n#include \"gui/Screen.hpp\"\n#include \"render/Font.hpp\"\n\nclass GamePauseMenu : public Screen {\n\ttypedef Screen super;\n\tpublic:\n\t\tGamePauseMenu();\n\t\t~GamePauseMenu();\n\t\trender::Font *font;\n\t\tButton *exitButton,*resumeButton,*pathTestButton;\n\t\tvirtual void render(util::DeltaTime &deltaTime, render::RenderManager &rManager);\n\t\tvirtual bool onControlEvent(int control, int action);\n};\n\n#endif\n" }, { "alpha_fraction": 0.7075038552284241, "alphanum_fraction": 0.7075038552284241, "avg_line_length": 24.115385055541992, "blob_id": "8f3320d679dcc1fe71e038efcb4e944d4bbf0770", "content_id": "0d7a40df7039ad2776f463bfedffa11fb7a47c86", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 653, "license_type": "no_license", "max_line_length": 74, "num_lines": 26, "path": "/src/cpp/util/DeltaTime.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_UTIL_DELTATIME_HPP_\n#define FYP_UTIL_DELTATIME_HPP_\n\n// c is u/s\n// u/t = u/s * s/t\n#define DELTATIME_ARITHMETIC_PROGRESSION(dt, c) ((dt)->getTimeDelta()*(c))\n#define DELTATIME_GEOMETRIC_PROGRESSION(dt, c) ((dt)->getTimeDelta()*(c))\n\nnamespace util {\n\tclass DeltaTime {\n\t\tdouble targetFramerate,currentTime,deltaTime,framerate,offsetTime;\n\t\tbool vsync;\n\t\tpublic:\n\t\t\tDeltaTime(bool vsync, double 
targetFramerate);\n\t\t\t~DeltaTime();\n\t\t\tvoid postTime(double time);\n\t\t\tvoid setOffsetTime(double offsetTime);\n\t\t\tdouble getTime();\n\t\t\tdouble getTimeDelta();\n\t\t\tdouble getFramerate();\n\t\t\tdouble getTargetFramerate();\n\t\t\tbool isVsync();\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.7058823704719543, "alphanum_fraction": 0.7058823704719543, "avg_line_length": 9, "blob_id": "6fcc82e41c895213477a690264a7192b42e00269", "content_id": "51acd2048b3fe6b95b98043e2fe9b8d9556611e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 51, "license_type": "no_license", "max_line_length": 19, "num_lines": 5, "path": "/src/cpp/render/shaders/code/apple.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//fragment\n\nint appleCField;\n\n#include \"common.h\"\n\n" }, { "alpha_fraction": 0.43452176451683044, "alphanum_fraction": 0.4552622437477112, "avg_line_length": 39.51388931274414, "blob_id": "f3183302dffed7228e7f4c677dbf352b9a381faf", "content_id": "8e270856e7764fe002cdc36be55e516f58d3268b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 17502, "license_type": "no_license", "max_line_length": 208, "num_lines": 432, "path": "/hooks/asset_parsers.py", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python5\n\nimport re\n\nfrom asset_common import *\n\nMTL_FILEID = 0\nOBJ_FILEID = 1\nMD5MESH_FILEID = 2\nMD5ANIM_FILEID = 3\nNAVOBJ_FILEID = 5\n\ndef parseMTL(filepath, filename, source_fp, meta, verbose=0):\n names = [\"Ka\", \"Kd\", \"Ks\", \"Tf\", \"d\", \"Ns\", \"Ni\", \"illum\", \"sharpness\", \"map_Ka\", \"map_Kd\", \"map_Ks\", \"map_Ns\", \"map_d\", \"disp\", \"decal\", \"bump\"]\n depends = []\n def flattenArray(material_name, data):\n final_data = [material_name, 0]\n if verbose>=1:\n ap = \"\"\n for i in range(len(data)):\n if data[i]!=None:\n if type(data[i])==str:\n depends.append(filepath+\"/\"+data[i])\n textureAssetId = meta['textures'][filepath+\"/\"+data[i].strip()]\n if verbose==1:\n ap += \", \"+names[i]+\"=[\"+str(textureAssetId)+\"]\"+str(data[i])\n final_data.append(textureAssetId)\n else:\n if verbose==1:\n ap += \", \"+names[i]+\"=\"+str(data[i])\n final_data.append(data[i])\n final_data[1] = final_data[1]|(1<<i)\n if verbose==1:\n print(material_name, end=\" \")\n if verbose==2:\n print(\"\\t\"+material_name+\": Flags=\"+str(final_data[1])+ap)\n return final_data\n\n if verbose==1:\n print(\"\\t\", end=\"\")\n materials = []\n current_material_name = None\n # Color/Illumination\n Ka = None\n Kd = None\n Ks = None\n Tf = None\n illum = None\n d = None\n Ns = None\n sharpness = None\n Ni = None\n # Texture map\n map_Ka = None\n map_Kd = None\n map_Ks = None\n map_Ns = None\n map_d = None\n disp = None\n decal = None\n bump = None\n for line in source_fp:\n line = line[:-1]\n temp = parse1s(None, line, \"newmtl\")\n if temp!=None:\n if current_material_name!=None:\n materials.append(flattenArray(current_material_name, [Ka, Kd, Ks, Tf, d, Ns, Ni, illum, sharpness, map_Ka, map_Kd, map_Ks, map_Ns, map_d, disp, decal, bump]))\n current_material_name = temp\n\n #float[3]\n Ka = parse3f(Ka, line, \"Ka\")\n Kd = parse3f(Kd, line, \"Kd\")\n Ks = parse3f(Ks, line, \"Ks\")\n Tf = parse3f(Tf, line, \"Tf\")\n #float\n d = parse1f(d, line, \"d\")\n Ns = parse1f(Ns, line, \"Ns\")\n Ni = parse1f(Ni, line, \"Ni\")\n #int\n illum = parse1i(illum, line, \"illum\")\n sharpness = parse1i(sharpness, line, 
\"sharpness\")\n #str\n map_Ka = parse1s(map_Ka, line, \"map_Ka\")\n map_Kd = parse1s(map_Kd, line, \"map_Kd\")\n map_Ks = parse1s(map_Ks, line, \"map_Ks\")\n map_Ns = parse1s(map_Ns, line, \"map_Ns\")\n map_d = parse1s(map_d, line, \"map_d\")\n disp = parse1s(disp, line, \"disp\")\n decal = parse1s(decal, line, \"decal\")\n bump = parse1s(bump, line, \"bump\")\n #Ka, Kd, Ks, Tf, d, Ns, Ni, illum, sharpness, map_Ka, map_Kd, map_Ks, map_Ns, map_d, disp, decal, bump\n\n materials.append(flattenArray(current_material_name, [Ka, Kd, Ks, Tf, d, Ns, Ni, illum, sharpness, map_Ka, map_Kd, map_Ks, map_Ns, map_d, disp, decal, bump]))\n if verbose==1:\n print()\n return MTL_FILEID, None, [len(materials)]+materials, depends\n\ndef parseOBJ(filepath, filename, source_fp, meta, verbose=0):\n depends = []\n enableTextures = None\n enableNormals = None\n numPrimitives = 0\n mtllib = None\n v = []\n vt = []\n vn = []\n object_name = None\n usemtl = \"\"\n s = False\n f = []\n objects = []\n for line in source_fp:\n line = line[:-1]\n tv = parse3f(None, line, \"v\")\n if tv!=None:\n v.append(tv)\n tvt = parse2f(None, line, \"vt\")\n if tvt!=None:\n vt.append(tvt)\n tvn = parse3f(None, line, \"vn\")\n if tvn!=None:\n vn.append(tvn)\n if line.find(\"g \")>=0:\n if object_name!=None:\n objects.append((object_name, meta['materials'][filepath+\"/\"+mtllib+\":\"+usemtl], s, numPrimitives, f))\n object_name = line.split(\" \")[1]\n #print(object_name)\n usemtl = \"\"\n s = False\n numPrimitives = 0\n f = []\n if line.find(\"mtllib \")>=0:\n mtllib = line.split(\" \")[1]\n depends.append(filepath+\"/\"+mtllib)\n if line.find(\"usemtl \")>=0:\n usemtl = line.split(\" \")[1]\n if line.find(\"s \")>=0:\n s = line.split(\" \")[1]==\"on\"\n if line.find(\"f \")==0:\n verticies = line[2:].split(\" \")\n numPrimitives = numPrimitives+1\n if len(verticies)>3:\n print(\"Wavefront file is not triangular!\")\n sys.exit(1)\n for vertex in verticies:\n fin = vertex.split(\"/\")\n faceFinal = [int(fin[0])]\n if len(fin)>1:\n if fin[1]==\"\":\n if enableTextures==True:\n print(\"Textures changed mid file!\")\n sys.exit(1)\n enableTextures = False\n else:\n if enableTextures==False:\n print(\"Textures changed mid file!\")\n sys.exit(1)\n enableTextures = True\n faceFinal.append(int(fin[1]))\n if len(fin)>2:\n if fin[2]==\"\":\n if enableNormals==True:\n print(\"Textures changed mid file!\")\n sys.exit(1)\n enableNormals = False\n else:\n if enableNormals==False:\n print(\"Textures changed mid file!\")\n sys.exit(1)\n enableNormals = True\n faceFinal.append(int(fin[2]))\n f.append(faceFinal)\n #print(meta['materials'][filepath+\"/\"+mtllib+\":\"+usemtl])\n objects.append((object_name, meta['materials'][filepath+\"/\"+mtllib+\":\"+usemtl], s, numPrimitives, f))\n #print((object_name, meta['materials'][filepath+\"/\"+mtllib+\":\"+usemtl], s, numPrimitives, f))\n if verbose>=1:\n print(\"\t\"+str(len(v))+\" vertecies, \"+str(len(vt))+\" texture coordinates, \"+str(len(vn))+\" normal coordinates, \"+str(len(objects))+\" objects\")\n return OBJ_FILEID, None, (len(v), v, len(vt), vt, len(vn), vn, 0, [], len(objects), objects), depends\n\ndef parseNAVOBJ(filepath, filename, source_fp, meta, verbose=0):\n groups = []\n v = []\n l = []\n for line in source_fp:\n line = line[:-1]\n group = parse1s(None, line, \"g\")\n if group!=None:\n groups.append(group)\n tv = parse1i3f(None, line, \"v\")\n if tv==None and line.find(\"v \")>=0:\n print(line)\n sys.exit(1);\n if tv!=None:\n v.append(tv)\n tl = parse2i(None, line, \"l\")\n if tl!=None:\n 
l.append(tl)\n macros = {}\n for i in range(len(groups)):\n macros[(\"ASSET_\"+filename.replace(\".\", \"_\")+\"_GROUP_\"+groups[i]).upper()] = i\n if verbose==1:\n print(\"\\t\"+str(len(v))+\" verticies, \"+str(len(l))+\" links\")\n elif verbose==2:\n print(\"\\t\"+str(len(v))+\" verticies, \"+str(len(l))+\" links\")\n print(\"\\tGroups:\"+str(groups))\n return NAVOBJ_FILEID, {\"macros\":macros}, (len(groups), len(v), v, len(l), l), None\n\ndef parseMD5Mesh(filepath, filename, source_fp, meta, verbose=0):\n # Data to load and store\n numJoints = 0\n numMeshes = 0\n joints = []\n meshes = []\n # Load from the .md5mesh file\n for line in source_fp:\n if line.find(\"MD5Version\")>=0:\n if line.find(\"MD5Version 10\")!=0:\n print(\"Unknown MD5Version!\")\n return False\n if line.find(\"numJoints\")>=0:\n numJoints = int(line.split(\" \")[1])\n #print(\"numJoints=\", numJoints)\n if line.find(\"numMeshes\")>=0:\n numMeshes = int(line.split(\" \")[1])\n #print(\"numMeshes=\", numMeshes)\n if line.find(\"joints {\")>=0:\n #print(\"Joints\")\n for line2 in source_fp:\n m = re.search(\"^\\s\\\"(.*)\\\"\\s(-?\\d+)\\s\\(\\s(-?\\d*\\.{0,1}\\d+)\\s(-?\\d*\\.{0,1}\\d+)\\s(-?\\d*\\.{0,1}\\d+)\\s\\)\\s\\(\\s(-?\\d*\\.{0,1}\\d+)\\s(-?\\d*\\.{0,1}\\d+)\\s(-?\\d*\\.{0,1}\\d+)\\s\\).*$\",line2)\n if m:\n try:\n name = m.group(1)\n parent = int(m.group(2))\n pos = [0, 0, 0]\n pos[0] = float(m.group(3))\n pos[1] = float(m.group(4))\n pos[2] = float(m.group(5))\n ori = [0, 0, 0]\n ori[0] = float(m.group(6))\n ori[1] = float(m.group(7))\n ori[2] = float(m.group(8))\n joints.append((name, parent, pos, ori))\n except Exception as e:\n print(\"Error loading joint\", e)\n if line2.find(\"}\")>=0:\n #print(\"Loaded\", len(joints), \"joints\")\n break\n if line.find(\"mesh {\")>=0:\n #print(\"Mesh\")\n shader_id = None\n numverts = 0\n verts = []\n numtris = 0\n tris = []\n numweights = 0\n weights = []\n for line2 in source_fp:\n if line2.find(\"shader \")>=0:\n shader_name = line2.split(\" \")[1][1:-2]\n try:\n shader_id = meta[\"materials\"][filepath+\"/\"+filename[:filename.rfind(\".\")]+\".mtl:\"+shader_name]\n except KeyError:\n shader_id = (-1, -1)\n\n # Mesh->Vert\n if line2.find(\"numverts \")>=0:\n numverts = int(line2.split(\" \")[1])\n #print(\"numVerts \", numverts)\n if line2.find(\"vert \")>=0:#vert vertIndex ( s t ) startWeight countWeight\n m = re.search(\"^\\svert\\s(\\d+)\\s\\(\\s(-?\\d*\\.{0,1}\\d+)\\s(-?\\d*\\.{0,1}\\d+)\\s\\)\\s([0-9]+)\\s([0-9]+).*$\", line2)\n if m:\n try:\n vertIndex = int(m.group(1))\n tex = [0, 0]\n tex[0] = float(m.group(2))\n tex[1] = float(m.group(3))\n startWeight = int(m.group(4))\n countWeight = int(m.group(5))\n verts.append((vertIndex, tex, startWeight, countWeight))\n except Exception as e:\n print(\"Error loading vert\", e)\n else:\n print(\"Vert match failed\", line2)\n\n # Mesh->Tri\n if line2.find(\"numtris \")>=0:\n numtris = int(line2.split(\" \")[1])\n #print(\"numTris\", numtris)\n if line2.find(\"tri \")>=0:#tri triIndex vertIndex[0] vertIndex[1] vertIndex[2]\n m = re.search(\"^\\stri\\s([0-9]+)\\s([0-9]+)\\s([0-9]+)\\s([0-9]+).*$\", line2)\n if m:\n try:\n triIndex = int(m.group(1))\n vertIndex0 = int(m.group(2))\n vertIndex1 = int(m.group(3))\n vertIndex2 = int(m.group(4))\n tris.append((triIndex, vertIndex0, vertIndex1, vertIndex2))\n except Exception as e:\n print(\"Error loading tri\", e)\n else:\n print(\"Tri match failed\", line2)\n\n # Mesh->weight\n if line2.find(\"numweights \")>=0:\n numweights = int(line2.split(\" \")[1])\n 
#print(\"numWeights\", numweights)\n if line2.find(\"weight \")>=0:#weight weightIndex joint bias ( pos.x pos.y pos.z )\n #weight 17 15 1.000000 ( -0.288115 1.239427 0.166542 )\n m = re.search(\"^\\sweight\\s(-?\\d+)\\s(-?\\d+)\\s(-?\\d*\\.{0,1}\\d+)\\s\\(\\s(-?\\d*\\.{0,1}\\d+)\\s(-?\\d*\\.{0,1}\\d+)\\s(-?\\d*\\.{0,1}\\d+)\\s\\).*$\", line2)\n if m:\n try:\n weightIndex = int(m.group(1))\n joint = int(m.group(2))\n bias = float(m.group(3))\n pos = [0, 0, 0]\n pos[0] = float(m.group(4))\n pos[1] = float(m.group(5))\n pos[2] = float(m.group(6))\n weights.append((weightIndex, joint, bias, pos))\n except Exception as e:\n print(\"Error loading tri\", e)\n else:\n print(\"Tri match failed\", line2)\n if line2.find(\"}\")>=0:\n break\n if len(verts)!=numverts:\n print(\"Expected to load \"+str(numverts)+\" verts, but instead loaded \"+str(len(verts)))\n if len(tris)!=numtris:\n print(\"Expected to load \"+str(numtris)+\" tris, but instead loaded \"+str(len(tris)))\n if len(weights)!=numweights:\n print(\"Expected to load \"+str(numweights)+\" weights, but instead loaded \"+str(len(weights)))\n meshes.append((shader_id, numverts, verts, numtris, tris, numweights, weights))\n # Create joint macros\n macros = {}\n for i in range(len(joints)):\n macros[(\"ASSET_\"+filename+\"_JOINT_\"+joints[i][0]).replace(\".\", \"_\").upper()] = i\n if verbose>=1:\n print(\"\\t\"+str(numJoints)+\" joints, \"+str(numMeshes)+\" meshes\")\n return MD5MESH_FILEID, {\"macros\":macros}, (numJoints, joints, numMeshes, meshes), None\n\ndef parseMD5Anim(filepath, filename, source_fp, meta, verbose=0):\n # args\n #hierarchy\n #bounds\n #baseframe\n #frame 0..n\n numFrames = None\n numJoints = None\n frameRate = None\n numAnimatedComponents = None\n hierarchy = []\n bounds = []\n baseframe = []\n frames = []\n for line in source_fp:\n #print(line)\n line = line[:-1]#Remove \\n\n\n temp_numFrames = parse1i(None, line, \"numFrames\")\n if temp_numFrames!=None:\n numFrames = temp_numFrames\n frames = [0]*temp_numFrames\n numJoints = parse1i(numJoints, line, \"numJoints\")\n frameRate = parse1i(frameRate, line, \"frameRate\")\n numAnimatedComponents = parse1i(numAnimatedComponents, line, \"numAnimatedComponents\")\n\n if line.find(\"hierarchy {\")>=0:\n for line2 in source_fp:\n m = re.search(\"^\\s+\\\"(.*)\\\"\\s*\"+REGEX_INT_CLASS+\"\\s+\"+REGEX_INT_CLASS+\"\\s+\"+REGEX_INT_CLASS+\".*$\", line2)\n\n if m:\n name = m.group(1)\n parent = int(m.group(2))\n flags = int(m.group(3))\n startIndex = int(m.group(4))\n hierarchy.append([name, parent, flags, startIndex])\n\n # Break line2\n if line2.find(\"}\")>=0:\n break\n\n\n if line.find(\"bounds {\")>=0:\n for line2 in source_fp:\n\n #( -1.634066 -1.634066 -1.634066 ) ( -1.634066 6.444685 5.410537 )\n m = re.search(\"^\\s*\\(\\s*\"+REGEX_FLOAT_CLASS+\"\\s+\"+REGEX_FLOAT_CLASS+\"\\s+\"+REGEX_FLOAT_CLASS+\"\\s*\\)\\s*\\(\\s*\"+REGEX_FLOAT_CLASS+\"\\s+\"+REGEX_FLOAT_CLASS+\"\\s+\"+REGEX_FLOAT_CLASS+\"\\s*\\).*$\", line2)\n if m:\n mins = [float(m.group(1)), float(m.group(2)), float(m.group(3))]\n maxs = [float(m.group(4)), float(m.group(5)), float(m.group(6))]\n bounds.append([mins, maxs])\n\n # Break line2\n if line2.find(\"}\")>=0:\n break\n\n if line.find(\"baseframe {\")>=0:\n for line2 in source_fp:\n\n m = re.search(\"^\\s*\\(\\s*\"+REGEX_FLOAT_CLASS+\"\\s+\"+REGEX_FLOAT_CLASS+\"\\s+\"+REGEX_FLOAT_CLASS+\"\\s*\\)\\s*\\(\\s*\"+REGEX_FLOAT_CLASS+\"\\s+\"+REGEX_FLOAT_CLASS+\"\\s+\"+REGEX_FLOAT_CLASS+\"\\s*\\).*$\", line2)\n if m:\n pos = [float(m.group(1)), float(m.group(2)), 
float(m.group(3))]\n ori = [float(m.group(4)), float(m.group(5)), float(m.group(6))]\n bounds.append([pos, ori])\n \n # Break line2\n if line2.find(\"}\")>=0:\n break\n\n m = re.match(\"frame\\s*(\\d+)\\s*{\", line)\n if m:\n frameNum = int(m.group(1))\n frame = []\n for line2 in source_fp:\n\n # Break line2\n if line2.find(\"}\")>=0:\n break\n\n m = re.search(\"^\\s+(.*?)\\s*$\", line2)\n if m:\n floats_line = m.group(1)\n floats_split = floats_line.split(\" \")\n for f in floats_split:\n frame.append(float(f))\n \n\n frames[frameNum] = frame\n\n if(verbose==1):\n print(\"\\t\"+str(numFrames)+\" frames, \"+str(numJoints)+\" joints, \"+str(numAnimatedComponents)+\" Animted Components\")\n return MD5ANIM_FILEID, None, [numFrames, numJoints, frameRate, numAnimatedComponents, hierarchy, bounds, baseframe, frames], None\n" }, { "alpha_fraction": 0.7008234262466431, "alphanum_fraction": 0.706312894821167, "avg_line_length": 24.418603897094727, "blob_id": "805c9b91199868786934d0e4e3725d3cab489d38", "content_id": "83164df8fcec5888cb98ffe7c8b8c4aaba80a91f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1093, "license_type": "no_license", "max_line_length": 128, "num_lines": 43, "path": "/src/cpp/util/AssetUtils.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/Globals.hpp\"\n#include \"util/AssetManager.hpp\"\n#include \"render/DDSImage.hpp\"\n\n#include \"AssetUtils.hpp\"\n\nusing namespace util;\nusing namespace render;\n\n#define GET_ASSET(id) AssetManager::getAssetManager()->getAsset(id)\n\nMaterial *AssetUtils::getMaterial(int assetId, int materialId)\n{\n\tAsset *asset = GET_ASSET(assetId);\n\tif(asset==0)\n\t{\n\t\tutil::Globals::fatalError(\"No such material library (MTL) \"+std::to_string(assetId));\n\t\treturn 0;\n\t}\n\tMaterialLibrary *mtl = dynamic_cast<MaterialLibrary *>(asset);\n\tif(mtl==0)\n\t{\n\t\tutil::Globals::fatalError(\"No such material \"+std::to_string(materialId)+\"in material library (MTL)\"+std::to_string(assetId));\n\t\treturn 0;\n\t}\n\treturn mtl->getMaterial(materialId);\n}\nvoid AssetUtils::bindTexture(int assetId)\n{\n\tAsset *asset = GET_ASSET(assetId);\n\tif(asset==0)\n\t{\n\t\tutil::Globals::fatalError(\"No such texture asset \"+std::to_string(assetId));\n\t\treturn;\n\t}\n\tDDSImage *img = dynamic_cast<DDSImage *>(asset);\n\tif(img==0)\n\t{\n\t\tutil::Globals::fatalError(\"Asset \"+std::to_string(assetId)+\" is not a texture\");\n\t\treturn;\n\t}\n\timg->bindTexture();\n}\n" }, { "alpha_fraction": 0.7462753653526306, "alphanum_fraction": 0.7462753653526306, "avg_line_length": 32.56060791015625, "blob_id": "0ba0d1ceea61d3c2b1d693c5524806d86082ed4f", "content_id": "dfad28ebf5a76dabc64ced5b8575850eea83fdb7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2215, "license_type": "no_license", "max_line_length": 144, "num_lines": 66, "path": "/tmp/StaticMesh.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef __STATICMESH_H_INCLUDED__\n#define __STATICMESH_H_INCLUDED__\n\nnamespace world {\n\tnamespace collisions {\n\t\tclass StaticMesh;\n\t\ttypedef struct StaticMeshTriangle StaticMeshTriangle;\n//typedef struct StaticMeshCell StaticMeshCell;\n\t}\n}\n\n#include <vector>\n#include \"util/Boundaries.hpp\"\n#include \"render/StaticModel.hpp\"\n\n//debug\n#include \"render/RenderManager.hpp\"\n#include \"render/BasicShapes.hpp\"\n\nnamespace world {\n\tnamespace collisions 
{\n\t\tstruct StaticMeshCell {\n\t\t\tutil::Boundaries::AABB *bounds;\n\t\t\tint level;\n\t\t\tbool leaf;\n\t\t\tstd::vector<struct StaticMeshCell> children;\n\t\t\tstd::vector<struct StaticMeshTriangle> triangles;\n\t\t};\n\t\tstruct StaticMeshTriangle {\n\t\t\trender::StaticModel *model;\n\t\t\trender::StaticModelObject *modelObject;\n\t\t\tint triangleIndex;\n\t\t};\n\t\tclass StaticMesh {\n\t\t\tprivate:\n\t\t\t\tStaticMeshCell rootNode;\n\t\t\tpublic:\n\t\t\t\tStaticMesh();\n\t\t\t\t~StaticMesh();\n\t\t\t\t//## Misc utils\n\t\t\t\t\n\t\t\t\t// Adds a static model to the static mesh collision checker\n\t\t\t\tvoid addStaticModel(render::StaticModel *model);\n\t\t\t\t// Adds a triangle to a cell and its children if needed\n\t\t\t\tvoid addTriangleToCell(StaticMeshTriangle &triangle, StaticMeshCell &cell, int cellLevel);\n\t\t\t\t// Adds a triangle to a cells children\n\t\t\t\tvoid addTriangleToCellChildren(StaticMeshTriangle &triangle, StaticMeshCell &parent, int childrenLevel);\n\n\t\t\t\t// Divides a cell into pieces\n\t\t\t\tvoid subdivideCell(StaticMeshCell &parent, int childrenLevel);\n\t\t\t\t// Adds a child cell to parent cell\n\t\t\t\tvoid addChildToCell(StaticMeshCell &parent, int childIndex, util::Boundaries::AABB *boxBounds);\n\n\t\t\t\t// debug\n\t\t\t\tvoid render(render::RenderManager &rManager, util::Boundaries::AABB &selectionBox);\n\t\t\t\tvoid renderCellChildren(render::RenderManager &rManager, util::Boundaries::AABB &selectionBox, StaticMeshCell *cell);\n\n\t\t\t\t// Raycasting functions\n\t\t\t\tutil::Boundaries::RaycastResult *rayCast(util::Boundaries::Raycast &raycast);\n\t\t\t\tStaticMeshTriangle *rayCastCellChildren(util::Boundaries::Raycast &raycast, StaticMeshCell &cell, util::Boundaries::RaycastResult &result);\n\t\t\t\tStaticMeshTriangle *rayCastCellTriangles(util::Boundaries::Raycast &raycast, StaticMeshCell &cell, util::Boundaries::RaycastResult &result);\n\t\t};\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.7160369157791138, "alphanum_fraction": 0.7251225709915161, "avg_line_length": 38.397727966308594, "blob_id": "e8060d0ca884781ffe6e07e8b6b040da0695ac8f", "content_id": "9ec62bc3bb62dbd5fa6516291aa3e4d3ae52831e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6934, "license_type": "no_license", "max_line_length": 162, "num_lines": 176, "path": "/src/cpp/ai/ObjectiveManager.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <glm/gtx/norm.hpp>\n#include \"res/obj/world.nav.obj.h\"\n#include \"world/World.hpp\"\n\n#include \"ObjectiveManager.hpp\"\n\nusing namespace ai;\nusing namespace ai::path;\nusing namespace world;\nusing namespace world::entities;\nusing namespace util;\n\nObjectiveManager::ObjectiveManager(World *pWorld, Enemy *pEnemy, NavigationGraph *pNavigationGraph)\n{\n\tthis->world = pWorld;\n\tthis->enemy = pEnemy;\n\tthis->pathExecuter = new PathExecuter(5.f, pEnemy, pNavigationGraph);\n\tthis->currentObjective = OBJECTIVE_WAITING;\n\tthis->pathFinder = new PathFinder();\n\tthis->pathExplorer = new PathExplorer(this->world->world_navigation_graph);\n\tthis->navigationGraph = pNavigationGraph;\n\n\tthis->pathExplorer->setGroupWhitelist(1<<ASSET_WORLD_NAV_OBJ_GROUP_EXPLORE);\n\tthis->pathExplorer->setGroupPOI(1<<ASSET_WORLD_NAV_OBJ_GROUP_ARTEFACT);\n\tthis->pathExplorer->setGroupPortal(1<<ASSET_WORLD_NAV_OBJ_GROUP_PORTAL);\n\n\tthis->perception = new PerceptionManager(pEnemy, {&typeid(Turret), &typeid(Player)}, glm::vec3());\n\tthis->aimBot = new 
AimBot(this->perception);\n\n\tthis->entranceNode = 0;\n\tthis->artefactCollectionStartTime = 0.f;\n\tthis->targetNode = 0;\n\tthis->targetRadius = 1.f;\n\tthis->collectedArtefact = false;\n}\nObjectiveManager::~ObjectiveManager()\n{\n\tdelete this->pathExecuter;\n\tdelete this->pathFinder;\n\tdelete this->pathExplorer;\n}\nvoid ObjectiveManager::tick(DeltaTime &deltaTime)\n{\n\tif(!this->pathFinder->done)\n\t{\n\t\tif(this->pathFinder->tick(20) && this->pathFinder->sucess)\n\t\t\tthis->pathExecuter->postPath(this->pathFinder->getPath());\n\t}\n\tthis->pathExecuter->tick(deltaTime);\n\tPathNode *currentNode;\n\tswitch(this->currentObjective)\n\t{\n\t\tcase OBJECTIVE_WAITING:\n\t\t\tif(deltaTime.getTime()-this->enemy->getSpawnTime()>=OBJECTIVE_WAITING_WAIT_TIME)\n\t\t\t{\n\t\t\t\t// Select an artefact\n\t\t\t\tthis->artefactNode = this->navigationGraph->getRandomNode(ASSET_WORLD_NAV_OBJ_GROUP_ARTEFACT);\n\t\t\t\tthis->entranceNode = this->navigationGraph->getRandomNode(ASSET_WORLD_NAV_OBJ_GROUP_EXIT);\n\t\t\t\tthis->enemy->setPosition(this->entranceNode->position);\n\t\t\t\tthis->targetNode = 0;\n\t\t\t\tthis->visitedExitNodes.clear();\n\t\t\t\tthis->visitedExitNodes.insert(this->entranceNode->id);\n\t\t\t\tthis->currentObjective = OBJECTIVE_LOCATE_ARTEFACT;\n\t\t\t}\n\t\t\treturn;\n\t\tcase OBJECTIVE_LOCATE_ARTEFACT:\n\t\t\t// If we've found the artefact\n\t\t\tif(glm::distance2(this->enemy->getPosition(), this->artefactNode->position)<=2*2)\n\t\t\t{\n\t\t\t\tthis->artefactCollectionStartTime = deltaTime.getTime();\n\t\t\t\tthis->currentObjective = OBJECTIVE_COLLECT_ATREFACT;\n\t\t\t}\n\t\t\t// If we reach the latest portal node\n\t\t\telse if(this->targetNode==0 || glm::distance2(this->enemy->getPosition(), this->targetNode->position)<=this->targetRadius*this->targetRadius)\n\t\t\t{\n\t\t\t\tcurrentNode = this->navigationGraph->getNearestPathNode(this->enemy->getPosition(), 1<<ASSET_WORLD_NAV_OBJ_GROUP_HIGHDETAIL);\n\t\t\t\tthis->targetNode = this->pathExplorer->getNext(this->navigationGraph->getNearestPathNode(this->enemy->getPosition(), 1<<ASSET_WORLD_NAV_OBJ_GROUP_LOWDETAIL));\n\t\t\t\t// Find adjacent exit node\n\t\t\t\tPathNode *p;\n\t\t\t\tfor(auto *l : this->targetNode->links)\n\t\t\t\t{\n\t\t\t\t\tp = l->getOther(this->targetNode);\n\t\t\t\t\tif(this->navigationGraph->getPathNodeGroupMask(p->id)&(1<<ASSET_WORLD_NAV_OBJ_GROUP_EXIT))\n\t\t\t\t\t{\n\t\t\t\t\t\tthis->visitedExitNodes.insert(p->id);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tthis->targetNode = this->navigationGraph->getNearestPathNode(this->targetNode->position, 1<<ASSET_WORLD_NAV_OBJ_GROUP_HIGHDETAIL);\n\t\t\t\tthis->pathFinder->start(currentNode, this->targetNode);\n\t\t\t}\n\t\t\tthis->perceptionCheck(deltaTime);\n\t\t\treturn;\n\t\tcase OBJECTIVE_COLLECT_ATREFACT:\n\t\t\t// If we've been collecting the artefact long enough\n\t\t\tif(deltaTime.getTime()-this->artefactCollectionStartTime>=OBJECTIVE_COLLECT_ARTEFACT_WAIT_TIME)\n\t\t\t{\n\t\t\t\tcurrentNode = this->navigationGraph->getNearestPathNodeFromPool(this->enemy->getPosition(), this->visitedExitNodes);\n\t\t\t\tthis->targetNode = this->navigationGraph->getNearestPathNode(currentNode->position, 1<<ASSET_WORLD_NAV_OBJ_GROUP_HIGHDETAIL);\n\t\t\t\tcurrentNode = this->navigationGraph->getNearestPathNode(this->enemy->getPosition(), 1<<ASSET_WORLD_NAV_OBJ_GROUP_HIGHDETAIL);\n\t\t\t\tthis->pathFinder->start(currentNode, this->targetNode);\n\t\t\t\tthis->collectedArtefact = true;\n\t\t\t\tthis->currentObjective = OBJECTIVE_EXIT_MUSEUM;\n\t\t\t}\n\t\t\treturn;\n\t\tcase 
OBJECTIVE_EXIT_MUSEUM:\n\t\t\t// If we've reached the exit\n\t\t\tif(this->targetNode==0 || glm::distance2(this->enemy->getPosition(), this->targetNode->position)<=this->targetRadius*this->targetRadius)\n\t\t\t{\n\t\t\t\tthis->world->gameOver(GAME_OVER_ENEMY_ESCAPED_W_ARTEFACT);\n\t\t\t}\n\t\t\tthis->perceptionCheck(deltaTime);\n\t\t\treturn;\n\t\tcase OBJECTIVE_ATTACK_TURRET: // Attack turret\n\n\t\t\treturn;\n\t\tcase OBJECTIVE_ATTACK_PLAYER: // Attack player\n\t\t\tif(this->targetNode!=0)\n\t\t\t{\n\t\t\t\tfloat targetDistanceToLastKnown = glm::distance(this->targetNode->position*glm::vec3(1,1,0), this->lastKnownPlayerPosition*glm::vec3(1,1,0));\n\t\t\t\tfloat targetDistanceToEnemy = glm::distance(this->targetNode->position*glm::vec3(1,1,0), this->enemy->getPosition()*glm::vec3(1,1,0));\n\t\t\t\tif(targetDistanceToEnemy<=2.f || targetDistanceToLastKnown>=2.f)// If player moved\n\t\t\t\t{\n\t\t\t\t\tthis->targetNode = 0;\n\t\t\t\t}\n\t\t\t}\n\t\t\tif(this->targetNode==0)\n\t\t\t{\n\t\t\t\tcurrentNode = this->navigationGraph->getNearestPathNode(this->enemy->getPosition(), 1<<ASSET_WORLD_NAV_OBJ_GROUP_HIGHDETAIL);\n\t\t\t\tthis->targetNode = this->navigationGraph->getNearestPathNode(this->lastKnownPlayerPosition, 1<<ASSET_WORLD_NAV_OBJ_GROUP_HIGHDETAIL);\n\t\t\t\tfloat targetDistanceToEnemy = glm::distance(this->targetNode->position*glm::vec3(1,1,0), this->enemy->getPosition()*glm::vec3(1,1,0));\n\t\t\t\tif(targetDistanceToEnemy<=2.f)\n\t\t\t\t{\n\t\t\t\t\tthis->targetNode = 0;\n\t\t\t\t\tif(this->collectedArtefact)\n\t\t\t\t\t\tthis->currentObjective = OBJECTIVE_EXIT_MUSEUM;\n\t\t\t\t\telse\n\t\t\t\t\t\tthis->currentObjective = OBJECTIVE_LOCATE_ARTEFACT;\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\tif(currentNode!=this->targetNode)\n\t\t\t\t\tthis->pathFinder->start(currentNode, this->targetNode);\n\t\t\t}\n\t\t\tthis->perceptionCheck(deltaTime);\n\t\t\treturn;\n\t}\n}\nvoid ObjectiveManager::perceptionCheck(util::DeltaTime &deltaTime)\n{\n\tthis->perception->tick(deltaTime);\n\tthis->aimBot->tick(deltaTime);\n\tEntity *perc = this->perception->getPerceivedEntity();\n\tif(dynamic_cast<Turret *>(perc)!=0)\n\t{\n\t\tif(this->currentObjective!=OBJECTIVE_ATTACK_TURRET)\n\t\t\tthis->targetNode = 0;\n\t\tthis->targetTurret = (Turret *)this->perception->getPerceivedEntity();\n\t\tthis->currentObjective = OBJECTIVE_ATTACK_TURRET;\n\t}\n\tif(dynamic_cast<Player *>(perc)!=0)\n\t{\n\t\tif(this->currentObjective!=OBJECTIVE_ATTACK_PLAYER)\n\t\t\tthis->targetNode = 0;\n\t\tthis->lastKnownPlayerPosition = this->perception->getPerceivedEntity()->getPosition();\n\t\tthis->currentObjective = OBJECTIVE_ATTACK_PLAYER;\n\t}\n\tTurret *t = dynamic_cast<Turret *>(perc);\n\tif(t!=0)\n\t{\n\t\tthis->currentObjective = OBJECTIVE_ATTACK_TURRET;\n\t\tthis->knownTurrets.insert(t);\n\t}\n}\nvoid ObjectiveManager::render(render::RenderManager &rManager)\n{\n\tthis->aimBot->render(rManager);\n}\n" }, { "alpha_fraction": 0.6299999952316284, "alphanum_fraction": 0.6800000071525574, "avg_line_length": 11.5, "blob_id": "784b348a4fe4c1e657ecda3abc6b2c8969fb23bb", "content_id": "e507c4903e12943bc17d590a9bd4ea1aaabbf666", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 100, "license_type": "no_license", "max_line_length": 33, "num_lines": 8, "path": "/src/cpp/render/shaders/code/fuzzymodel_fragment.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//fragment\n\nin vec3 vertexColor;\nout vec4 color;\n\nvoid main() {\n\tcolor = vec4(vertexColor, 1.0f);\n}\n" 
}, { "alpha_fraction": 0.7386698126792908, "alphanum_fraction": 0.7457327842712402, "avg_line_length": 23.27142906188965, "blob_id": "35a16bc9e317dd94408e325e811956d9e27e8201", "content_id": "345452fbfc26c913d548e1dc9d83f58f57538a89", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1699, "license_type": "no_license", "max_line_length": 152, "num_lines": 70, "path": "/src/cpp/render/RenderManager.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_RENDER_RENDERMANAGER_HPP_\n#define FYP_RENDER_RENDERMANAGER_HPP_\n\nnamespace render {\n\tnamespace shaders {\n\t\tclass ShaderProgram;\n\t}\n}\n\n#include <glm/matrix.hpp>\n#include <util/gl.h>\n#include \"render/shaders/ShaderPrograms.h\"\n#include <glm/gtc/quaternion.hpp>\n\nnamespace render {\n\n\tclass RenderManager {\n\tprivate:\n\t\tbool mDirty,vDirty,pDirty,mvDirty,vpDirty,mvpDirty;\n\t\tbool doCullFace,doDepthBuffer;\n\t\tglm::mat4 MV,VP,MVP,stackM,stackMV,stackMVP;\n\t\tint widthPx,heightPx;\n\t\tfloat widthMM,heightMM;\n\t\tshaders::ShaderProgram *shader;\n\tpublic:\n\t\tRenderManager();\n\t\tvirtual ~RenderManager();\n\n\n\t\t// Matrix\n\t\tvoid setMVPMatrix(GLuint mvpMatrixShaderLocation);\n\n\t\tglm::mat4 M,V,P;\n\n\t\tvoid markPDirty();\n\t\tvoid markVDirty();\n\t\tvoid markMDirty();\n\n\t\tvoid pushMatrixM();\n\t\tvoid popMatrixM();\n\n\t\tvoid setShaderMatricies(shaders::ShaderProgram &shaderProgram);\n\t\tshaders::ShaderProgram *useShader(int shader);\n\t\tGLint getVertexPosition();\n\t\tGLint getVertexNormal();\n\t\tGLint getVertexTexture();\n\n\t\tvoid enableDepth();\n\t\tvoid disableDepth();\n\t\tvoid enableCullFace();\n\t\tvoid disableCullFace();\n\t\tvoid enableTransparency();\n\t\tvoid disableTransparency();\n\n\t\tvoid setDimensionsPx(int widthPx, int heightPx);\n\t\tvoid setDimensionsMM(float widthMM, float heightMM);\n\t\tint getWidthPx();\n\t\tint getHeightPx();\n\t\tfloat getWidthMM();\n\t\tfloat getHeightMM();\n\n\t\t// Debug\n\t\tvoid renderDirectionVector(const glm::vec3 position, const glm::vec3 direction, const glm::vec4 color);\n\t\tvoid renderDirectionVectors(const glm::vec3 position, const glm::vec3 directionForward, const glm::vec3 direction2, const glm::vec4 direction2_color);\n\t\tvoid renderOrientation(const glm::vec3 position, const glm::quat q);\n\t};\n\n}\n\n#endif\n" }, { "alpha_fraction": 0.6919151544570923, "alphanum_fraction": 0.7029181718826294, "avg_line_length": 30.832487106323242, "blob_id": "e87fdeaaf68337287bbe403967feed6bfe557d54", "content_id": "b3594d46a350d4b05de8ca08d752761ab81f3f27", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6271, "license_type": "no_license", "max_line_length": 133, "num_lines": 197, "path": "/src/cpp/render/Font.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <iostream>\n\n#include \"Font.hpp\"\n\nusing namespace render;\n\nFT_Library Font::library;\nbool Font::libraryInit = false;\nstatic FT_Byte courier_font[] = {\n\t#include \"cour.h\"\n};\nFont::Font(std::string fontfamily, float heightMM)\n{\n\tif(!Font::libraryInit)\n\t{\n\t\tFont::libraryInit = true;\n\t\tif(FT_Init_FreeType(&library))\n\t\t{\n\t\t\tPRINT_DEBUG(\"Missing FT library\");\n\t\t\treturn;\n\t\t}\n\t}\n\tthis->shader = shaders::ShaderProgram::getShader(SHADER_font);\n\tthis->vertexPositionAttribute = Font::shader->getShaderLocation(false, SHADERVAR_vertex_position);\n\tthis->vertexTextureAttribute = 
Font::shader->getShaderLocation(false, SHADERVAR_vertex_texture);\n\tthis->uniformTextColor = Font::shader->getShaderLocation(true, SHADER_font_textColor);\n\tthis->uniformTexture = Font::shader->getShaderLocation(true, SHADERVAR_material_map_Kd);\n\n\tglGenVertexArrays(1, &this->fontFaceVertexArrayObjectID);\n\tglGenTextures(256, this->fontFaceTextures);\n\tglGenBuffers(1, &this->fontFaceTextureCoordBufferID);\n\tglGenBuffers(1, &this->fontFaceTextureUVBufferID);\n\tthis->face = new FT_Face;\n\tif(FT_New_Memory_Face(this->library, courier_font, sizeof(courier_font), 0, this->face))\n\t{\n\t\tstd::cerr << \"Failed to load font face\" << std::endl;\n\t\tstd::exit(1);\n\t}\n\tthis->setHeight(heightMM);\n\tthis->setColor(0.5f, 0.f, 1.0f, 1.f);\n\tfor(int i=0;i<256;i++)\n\t\tthis->metrics[i] = {};\n}\nFont::~Font()\n{\n\t//FT_Done_FreeType(this->library);\n}\nfloat Font::getTextWidth(std::string text, render::RenderManager &rManager)\n{\n\tfloat calculatedPixelSize = this->heightMM*rManager.getHeightPx()/rManager.getHeightMM();\n\tfloat sx = rManager.getWidthMM()/rManager.getWidthPx();\n\tfloat totalWidth = 0;\n\tfor(unsigned long i=0;i<text.length(); i++)\n\t{\n\t\tchar c = text[i];\n\t\tstruct GlyphMetrics *glyph = getGlyphMetrics_NoTexture(c, calculatedPixelSize);\n\t\ttotalWidth += (glyph->width+(glyph->advanceX >> 6)) * sx;\n\t}\n\treturn totalWidth;\n}\nvoid Font::printf(std::string text, render::RenderManager &rManager)\n{\n\t// Prime Shader Program\n\trManager.useShader(SHADER_font);\n\n\tglUniform4f(Font::uniformTextColor, this->r, this->g, this->b, this->a);\n\tglUniform1i(Font::uniformTexture, 0);\n\n\n\t// Set up vertex array/buffer objects\n\tglBindVertexArray(this->fontFaceVertexArrayObjectID);\n\n\tfloat calculatedPixelSize = this->heightMM*rManager.getHeightPx()/rManager.getHeightMM();\n\tfloat x=0,y=0,vx=1,vy=1,vw=10,vh=10,sx=rManager.getWidthMM()/rManager.getWidthPx(),sy=rManager.getHeightMM()/rManager.getHeightPx();\n\trManager.enableTransparency();\n\tglActiveTexture(GL_TEXTURE0);\n\tfor(unsigned long i=0;i<text.length(); i++)\n\t{\n\t\tchar c = text[i];\n\n\t\tglActiveTexture(GL_TEXTURE0);\n\t\tglBindTexture(GL_TEXTURE_2D, this->fontFaceTextures[(int)c]);\n\t\t// (Re)Load the texture\n\t\tstruct GlyphMetrics *glyph = getGlyphMetrics(c, calculatedPixelSize);\n\t\t\n\t\t// Get the coords\n\t\tvx = x + glyph->left * sx;\n\t\tvy = y + glyph->top * sy;\n\t\tvw = glyph->width * sx;\n\t\tvh = glyph->height * sy;\n\t\tGLfloat coords[] = {\n\t\t\tvx,\t\tvy,\n\t\t\tvx,\t\tvy - vh,\n\t\t\tvx + vw,\tvy,\n\t\t\tvx + vw,\tvy - vh,\n\t\t};\n\t\tGLuint uvCoords[] = {\n\t\t\t0,0,\n\t\t\t0,1,\n\t\t\t1,0,\n\t\t\t1,1,\n\t\t};\n\t\t\n\t\tglEnableVertexAttribArray(vertexPositionAttribute);\n\t\tglBindBuffer(GL_ARRAY_BUFFER, this->fontFaceTextureCoordBufferID);\n\t\tglBufferData(GL_ARRAY_BUFFER, sizeof(coords), coords, GL_DYNAMIC_DRAW);\n\t\tglVertexAttribPointer(vertexPositionAttribute, 2, GL_FLOAT, GL_FALSE, 0, 0);\n\t\t\n\t\tglEnableVertexAttribArray(vertexTextureAttribute);\n\t\tglBindBuffer(GL_ARRAY_BUFFER, this->fontFaceTextureUVBufferID);\n\t\tglBufferData(GL_ARRAY_BUFFER, sizeof(uvCoords), uvCoords, GL_DYNAMIC_DRAW);\n\t\tglVertexAttribPointer(vertexTextureAttribute, 2, GL_UNSIGNED_INT, GL_FALSE, 0, 0);\n\t\t\n\t\tglDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n\t\t\n\t\tglDisableVertexAttribArray(vertexPositionAttribute);\n\t\tglDisableVertexAttribArray(vertexTextureAttribute);\n\t\t\n\t\tx += (glyph->advanceX >> 6) * sx;\n\t\ty += (glyph->advanceY >> 6) * 
sy;\n\t}\n\trManager.disableTransparency();\n}\nvoid Font::setHeight(float heightMM)\n{\n\tthis->heightMM = heightMM;\n}\nvoid Font::setColor(float r, float g, float b, float a)\n{\n\tthis->r = r;\n\tthis->b = b;\n\tthis->g = g;\n\tthis->a = a;\n}\nstruct GlyphMetrics *Font::getGlyphMetrics(char c, int calculatedPixelSize)\n{\n\tstruct GlyphMetrics *glyph = &this->metrics[(int)c];\n\t// Ensure glyph is up to date\n\tif(glyph->currentPixelSize==calculatedPixelSize)\n\t\treturn glyph;\n\tglyph->currentPixelSize = calculatedPixelSize;\n\t// Update face pixel sizes\n\tif(FT_Set_Pixel_Sizes(*this->face, 0, calculatedPixelSize))\n\t{\n\t\tstd::cerr << \"Failed to set pixel sizes for font\" << std::endl;\n\t\tstd::exit(1);\n\t}\n\tint err = FT_Load_Char(*this->face, c, FT_LOAD_RENDER);\n\tif(err)\n\t{\n\t\tstd::cerr << \"[ERR\" << err << \"] Failed to load character '\" << c << \"'\" << std::endl;\n\t\t//std::exit(1);\n\t}\n\tFT_GlyphSlot g = (*this->face)->glyph;\n\t// Load the texture\n\tglPixelStorei(GL_UNPACK_ALIGNMENT, 1);\n\tglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n\tglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n\tglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);\n\tglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);\n\tglTexImage2D(GL_TEXTURE_2D, 0, GL_RED, g->bitmap.width, g->bitmap.rows, 0, GL_RED, GL_UNSIGNED_BYTE, g->bitmap.buffer);\n\t// Update the metrics\n\tglyph->top = g->bitmap_top;\n\tglyph->left = g->bitmap_left;\n\tglyph->width = g->bitmap.width;\n\tglyph->height = g->bitmap.rows;\n\tglyph->advanceX = g->advance.x;\n\tglyph->advanceY = g->advance.y;\n\treturn metrics;\n}\nstruct GlyphMetrics *Font::getGlyphMetrics_NoTexture(char c, int calculatedPixelSize)\n{\n\tstruct GlyphMetrics *glyph = &this->metrics[(int)c];\n\t// Ensure glyph is up to date\n\tif(glyph->currentPixelSize==calculatedPixelSize)\n\t\treturn glyph;\n\t// Update face pixel sizes\n\tif(FT_Set_Pixel_Sizes(*this->face, 0, calculatedPixelSize))\n\t{\n\t\tstd::cerr << \"Failed to set pixel sizes for font\" << std::endl;\n\t\tstd::exit(1);\n\t}\n\tif(FT_Load_Char(*this->face, c, FT_LOAD_RENDER))\n\t{\n\t\tstd::cerr << \"Failed to load character\" << c << std::endl;\n\t\tstd::exit(1);\n\t}\n\tFT_GlyphSlot g = (*this->face)->glyph;\n\t// Update the metrics\n\tglyph->top = g->bitmap_top;\n\tglyph->left = g->bitmap_left;\n\tglyph->width = g->bitmap.width;\n\tglyph->height = g->bitmap.rows;\n\tglyph->advanceX = g->advance.x;\n\tglyph->advanceY = g->advance.y;\n\treturn metrics;\n}\n" }, { "alpha_fraction": 0.7356418967247009, "alphanum_fraction": 0.7373310923576355, "avg_line_length": 30.573333740234375, "blob_id": "e5f6f737cb3936d70da76bb1c62bb0611d6987b5", "content_id": "0d30316597240ac3b8df8bc0aa02f84572d51961", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2368, "license_type": "no_license", "max_line_length": 157, "num_lines": 75, "path": "/src/cpp/render/StaticModel.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_RENDER_STATICMODEL_HPP_\n#define FYP_RENDER_STATICMODEL_HPP_\n\nnamespace render {\n\tstruct FaceKey;\n\tstruct FaceKeyHasher;\n\ttypedef struct StaticModelObject StaticModelObject;\n\tclass RenderManager;\n\tclass StaticModel;\n}\n\n#include <vector>\n#include <glm/glm.hpp>\n#include \"util/AssetManager.hpp\"\n#include \"util/gl.h\"\n#include \"world/collisions/StaticMesh.hpp\"\n\nstd::ostream &operator<<(std::ostream &ost, 
const render::StaticModel &model);\nstd::ostream &operator<<(std::ostream &ost, const render::StaticModelObject &o);\n\nnamespace render {\n\tstruct FaceKey {\n\t\tint vertexPositionIndex;\n\t\tint vertexTextureIndex;\n\t\tint vertexNormalIndex;\n\t\tbool operator <(struct FaceKey &A) const {\n\t\t\treturn this->vertexPositionIndex<A.vertexPositionIndex && this->vertexTextureIndex<A.vertexTextureIndex && this->vertexNormalIndex<A.vertexNormalIndex;\n\t\t}\n\t\tbool operator ==(const struct FaceKey &A) const {\n\t\t\treturn this->vertexPositionIndex==A.vertexPositionIndex && this->vertexTextureIndex==A.vertexTextureIndex && this->vertexNormalIndex==A.vertexNormalIndex;\n\t\t}\n\t};\n\tstruct FaceKeyHasher {\n\t\tstd::size_t operator()(const struct render::FaceKey& k) const\n\t\t{\n\t\t\tusing std::size_t;\n\t\t\treturn ((std::hash<int>()(k.vertexPositionIndex)^ (std::hash<int>()(k.vertexTextureIndex) << 1)) >> 1) ^ (std::hash<int>()(k.vertexNormalIndex) << 1);\n\t\t}\n\t};\n\tstruct StaticModelObject {\n\t\tstd::string name;\n\t\tint mtlAsset;\n\t\tint materialId;\n\t\tbool s;\n\t\tint numPrimitives;//triangles/faces\n\t\t//int numVerticies = numPrimitives*3\n\t\tGLuint *indecies;//length = numVerticies\n\t\tGLuint indexBufferID;\n\t};\n\tclass StaticModel : public util::Asset {\n\t\tfriend class world::collisions::StaticMesh;\n\t\tfriend std::ostream &::operator<<(std::ostream &ost, const render::StaticModel &model);\n\t\tpublic:\n\t\t\tStaticModel(int assetId, std::istream &fp);\n\t\t\tvirtual ~StaticModel();\n\t\t\tvirtual void write(std::ostream &ost) const;\n\t\t\tvirtual void postload();\n\t\t\tutil::Boundaries::AABB &getBounds();\n\t\t\tvoid render(render::RenderManager &rManager, int shader);\n\t\tprivate:\n\t\t\tutil::Boundaries::AABB *bounds;\n\t\t\tint dataBufferStride;\n\t\t\tint dataBufferNormalsOffset;\n\t\t\tint dataBufferColorsOffset;\n\t\t\tint lenVertexPositions;\n\t\t\tstd::vector<GLfloat> dataBuffer;\n\t\t\tstd::list<StaticModelObject *> objects;\n\t\t\tGLuint vertexArrayID,vertexDataBufferID,tempColorBuffer;\n\n\t\t\tint temp_totalVertexCount;\n\t};\n}\n\n\n#endif\n" }, { "alpha_fraction": 0.6302974224090576, "alphanum_fraction": 0.6386616826057434, "avg_line_length": 33.26751708984375, "blob_id": "bf459f33df975261940192320d5520a8b52aa0ee", "content_id": "e83083ba59efa5935dbc2396e6c74254b79236fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5380, "license_type": "no_license", "max_line_length": 127, "num_lines": 157, "path": "/src/cpp/render/MaterialLibrary.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/StreamUtils.hpp\"\n#include \"util/Globals.hpp\"\n#include \"render/DDSImage.hpp\"\n#include \"util/AssetUtils.hpp\"\n#include \"render/shaders/ShaderProgram.hpp\"\n\n#include \"render/MaterialLibrary.hpp\"\n\n#define readFloat3(v,x) do { v[0] = readFloat(x); v[1]= readFloat(x); v[2] = readFloat(x); } while(0);\n\nusing namespace render;\nusing namespace render::shaders;\nusing namespace util::StreamUtils;\n\nMaterialLibrary::MaterialLibrary(int assetId, std::istream &fp) : Asset(assetId)\n{\n\tthis->setName(readString(fp));\n\n\tint nMats = readInt(fp);\n\n\tfor(int i=0;i<nMats;i++)\n\t{\n\t\tMaterial m;\n\n\t\tm.name = readString(fp);\n\t\tm.flags = readInt(fp);\n\n\t\tif(m.flags&MATERIAL_MASK_Ka) readFloat3(m.Ka, fp);\n\t\tif(m.flags&MATERIAL_MASK_Kd) readFloat3(m.Kd, fp);\n\t\tif(m.flags&MATERIAL_MASK_Ks) readFloat3(m.Ks, fp);\n\t\tif(m.flags&MATERIAL_MASK_Tf) 
readFloat3(m.Tf, fp);\n\t\tif(m.flags&MATERIAL_MASK_d) m.d = readFloat(fp);\n\t\tif(m.flags&MATERIAL_MASK_Ns) m.Ns = readFloat(fp);\n\t\tif(m.flags&MATERIAL_MASK_Ni) m.Ni = readFloat(fp);\n\t\tif(m.flags&MATERIAL_MASK_illum) m.illum = readInt(fp);\n\t\tif(m.flags&MATERIAL_MASK_sharpness) m.sharpness = readInt(fp);\n\t\tif(m.flags&MATERIAL_MASK_map_Ka) m.map_Ka = readInt(fp);\n\t\tif(m.flags&MATERIAL_MASK_map_Kd) m.map_Kd = readInt(fp);\n\t\tif(m.flags&MATERIAL_MASK_map_Ks) m.map_Ks = readInt(fp);\n\t\tif(m.flags&MATERIAL_MASK_map_Ns) m.map_Ns = readInt(fp);\n\t\tif(m.flags&MATERIAL_MASK_map_d) m.map_d = readInt(fp);\n\t\tif(m.flags&MATERIAL_MASK_disp) m.disp = readInt(fp);\n\t\tif(m.flags&MATERIAL_MASK_decal) m.decal = readInt(fp);\n\t\tif(m.flags&MATERIAL_MASK_bump) m.bump = readInt(fp);\n\n\t\tthis->materials.push_back(m);\n\t}\n}\nMaterialLibrary::~MaterialLibrary()\n{\n}\nvoid MaterialLibrary::write(std::ostream &ost) const\n{\n\tost << \"[\" << this->getAssetID() << \":\" << this->getName() << \".mtl] \" << this->materials.size() << \" materials\" << std::endl;\n\tfor(const Material &m : this->materials)\n\t\tost << \"\t\" << m << std::endl;\n}\nMaterial *MaterialLibrary::getMaterial(int materialId)\n{\n\treturn &this->materials[materialId];\n}\n#define SET_SHADER_VEC3(name) do { \\\n\tint shaderLoc = shader->getShaderLocation(true, SHADERVAR_material_##name); \\\n\tif(shaderLoc!=-1) \\\n\t{ \\\n\t\tif(mat->flags&MATERIAL_MASK_##name) \\\n\t\t{ \\\n\t\t\tglUniform3f(shaderLoc, mat->name[0], mat->name[1], mat->name[2]); \\\n\t\t} \\\n\t\telse \\\n\t\t\tutil::Globals::fatalError(\"Material is missing field for shader uniform \"#name); \\\n\t} \\\n} while(0);\n#define SET_SHADER_VALUE(name, gltype) do { \\\n\tint shaderLoc = shader->getShaderLocation(true, SHADERVAR_material_##name); \\\n\tif(shaderLoc!=-1) \\\n\t{ \\\n\t\tif(mat->flags&MATERIAL_MASK_##name) \\\n\t\t{ \\\n\t\t\tglUniform##gltype(shaderLoc, mat->name); \\\n\t\t} \\\n\t\telse \\\n\t\t\tutil::Globals::fatalError(\"Material is missing field for shader uniform \"#name); \\\n\t} \\\n} while(0);\n#define SET_SHADER_TEXTURE(name) do { \\\n\tint texLoc = shader->getShaderLocation(true, SHADERVAR_material_##name); \\\n\tif(texLoc!=-1) \\\n\t{ \\\n\t\tif(mat->flags&MATERIAL_MASK_##name) \\\n\t\t{ \\\n\t\t\tglActiveTexture(GL_TEXTURE0+t); \\\n\t\t\tutil::AssetUtils::bindTexture(mat->name); \\\n\t\t\tglUniform1i(texLoc, t); \\\n\t\t\tt++; \\\n\t\t} \\\n\t\telse \\\n\t\t\tutil::Globals::fatalError(\"Material is missing field for shader uniform \"#name); \\\n\t} \\\n} while(0);\nvoid MaterialLibrary::updateShader(shaders::ShaderProgram *shader, int materialId)\n{\n\tMaterial *mat = getMaterial(materialId);\n\tint t = 0;\n\n\tSET_SHADER_VEC3(Ka);\n\tSET_SHADER_VEC3(Kd);\n\tSET_SHADER_VEC3(Ks);\n\tSET_SHADER_VEC3(Tf);\n\tSET_SHADER_VALUE(d, 1f);\n\tSET_SHADER_VALUE(Ns, 1f);\n\tSET_SHADER_VALUE(Ni, 1f);\n\tSET_SHADER_VALUE(illum, 1i);\n\tSET_SHADER_VALUE(sharpness, 1i);\n\tSET_SHADER_TEXTURE(map_Ka);\n\tSET_SHADER_TEXTURE(map_Kd);\n\tSET_SHADER_TEXTURE(map_Ks);\n\tSET_SHADER_TEXTURE(map_Ns);\n\tSET_SHADER_TEXTURE(map_d);\n\tSET_SHADER_TEXTURE(disp);\n\tSET_SHADER_TEXTURE(decal);\n\tSET_SHADER_TEXTURE(bump);\n}\nvoid MaterialLibrary::postload()\n{\n\n}\nstd::ostream &operator<<(std::ostream &ost, const render::Material &m)\n{\n\tost << \"Material \"<< m.name << \" (\";\n\tif(m.flags&MATERIAL_MASK_Ka\t\t) ost << \"Ka=(\"<< m.Ka[0] << \", \"<< m.Ka[1] << \", \"<< m.Ka[2] << \")\";\n\tif(m.flags&MATERIAL_MASK_Kd\t\t) ost << \", Kd=(\"<< 
m.Kd[0] << \", \"<< m.Kd[1] << \", \"<< m.Kd[2] << \")\";\n\tif(m.flags&MATERIAL_MASK_Ks\t\t) ost << \", Ks=(\"<< m.Ks[0] << \", \"<< m.Ks[1] << \", \"<< m.Ks[2] << \")\";\n\tif(m.flags&MATERIAL_MASK_Tf\t\t) ost << \", Tf=(\"<< m.Tf[0] << \", \"<< m.Tf[1] << \", \"<< m.Tf[2] << \")\";\n\tif(m.flags&MATERIAL_MASK_d \t\t) ost << \", d=\"<< m.d;\n\tif(m.flags&MATERIAL_MASK_Ns\t\t) ost << \", Ns=\"<< m.Ns;\n\tif(m.flags&MATERIAL_MASK_Ni\t\t) ost << \", Ni=\"<< m.Ni;\n\tif(m.flags&MATERIAL_MASK_illum\t\t) ost << \", illum=\"<< m.illum;\n\tif(m.flags&MATERIAL_MASK_sharpness\t) ost << \", sharpness=\"<< m.sharpness;\n\tif(m.flags&MATERIAL_MASK_map_Ka\t\t) ost << \", map_Ka=\"<< m.map_Ka;\n\tif(m.flags&MATERIAL_MASK_map_Kd\t\t) ost << \", map_Kd=\"<< m.map_Kd;\n\tif(m.flags&MATERIAL_MASK_map_Ks\t\t) ost << \", map_Ks=\"<< m.map_Ks;\n\tif(m.flags&MATERIAL_MASK_map_Ns\t\t) ost << \", map_Ns=\"<< m.map_Ns;\n\tif(m.flags&MATERIAL_MASK_map_d\t\t) ost << \", map_d=\"<< m.map_d;\n\tif(m.flags&MATERIAL_MASK_disp\t\t) ost << \", disp=\"<< m.disp;\n\tif(m.flags&MATERIAL_MASK_decal\t\t) ost << \", decal=\"<< m.decal;\n\tif(m.flags&MATERIAL_MASK_bump\t\t) ost << \", bump=\"<< m.bump;\n\treturn ost << \")\";\n}\nbool operator==(MaterialAsset &a, MaterialAsset &b)\n{\n\treturn a.assetId==b.assetId && a.materialId==b.materialId;\n}\nbool operator!=(MaterialAsset &a, MaterialAsset &b)\n{\n\treturn !(a==b);\n}\n" }, { "alpha_fraction": 0.7136498689651489, "alphanum_fraction": 0.721068263053894, "avg_line_length": 15.439023971557617, "blob_id": "b4ef85875f4dee4c063b1a93bb53a4c8de3619e7", "content_id": "c1856fe536c1971f9759628df453c8b720bd36ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 674, "license_type": "no_license", "max_line_length": 35, "num_lines": 41, "path": "/src/cpp/ai/path/PathCommon.h", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_PATH_PATHCOMMON_H_\n#define FYP_AI_PATH_PATHCOMMON_H_\n\n#include <glm/glm.hpp>\n#include <glm/gtx/hash.hpp>\n#include <vector>\n#include \"util/DebugFlags.hpp\"\n\n#define PATHNODE_FLAG_ENTRANCE 1\n#define PATHNODE_FLAG_EXIT 2\n#define PATHNODE_FLAG_COVER 4\n#define PATHNODE_FLAG_EXHIBIT 8\n\nstruct PathNode;\n\nstruct PathNodeLink {\n\tint id;\n\tPathNode *a,*b;\n\tdouble dist;\n\tPathNode *getOther(PathNode *p)\n\t{\n\t\treturn p==a ? 
b : a;\n\t}\n#ifdef ENABLE_DEBUG_PATHRENDER\n\tbool path;\n#endif\n};\n\n\nstruct PathNode {\n\tint id;\n\tglm::vec3 position;\n\tint flags;\n\tstd::vector<PathNodeLink *> links;\n#ifdef ENABLE_DEBUG_PATHRENDER\n\t// Debug\n\tbool open,closed,current;\n#endif\n};\n\n#endif\n" }, { "alpha_fraction": 0.7230340838432312, "alphanum_fraction": 0.7279053330421448, "avg_line_length": 25.127273559570312, "blob_id": "29b22a12df19f6c7d3ec8024598d499584e90f44", "content_id": "e59d27121046a1605fc9b4097ed3bd5c13340a94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1437, "license_type": "no_license", "max_line_length": 99, "num_lines": 55, "path": "/src/cpp/world/Entity.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_WORLD_ENTITY_HPP_\n#define FYP_WORLD_ENTITY_HPP_\n\nnamespace world {\n\tclass World;\n}\n\n#include <glm/gtc/quaternion.hpp>\n#include <glm/vec3.hpp>\n#include \"render/RenderManager.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include \"util/Boundaries.hpp\"\n\nnamespace world {\n\tclass Entity {\n\t\tprivate:\n\t\t\tglm::vec3 position, velocity;\n\t\t\tglm::quat orientation;\n\t\t\tWorld *world;\n\t\t\tdouble spawnTime;\n\t\tprotected:\n\t\t\tint healthCycleIndex;\n\t\t\tint healthCycleBullets;\n\t\t\tdouble healthCycleDuration;\n\t\t\tdouble *lastDamageTime;\n\t\t\tutil::Boundaries::AABB *bounds;\n\n\t\t\tvoid setBulletHealth(double healthCycleDuration, int healthCycleBullets);\n\t\tpublic:\n\t\t\tbool doRender;\n\n\t\t\tEntity();\n\t\t\tvirtual ~Entity();\n\t\t\tvirtual void addedToWorld(world::World *world, double spawnTime);\n\t\t\tworld::World &getWorld();\n\t\t\tdouble getSpawnTime();\n\t\t\tvirtual void attack(double time, glm::vec3 direction, int type);\n\t\t\tvirtual void die(double time, glm::vec3 direction, int type);\n\n\t\t\t// Position and rotation functions\n\t\t\tglm::vec3 getPosition();\n\t\t\tvoid setPosition(glm::vec3 location);\n\t\t\tvoid translate(glm::vec3 location);\n\t\t\tglm::quat getOrientation();\n\t\t\tvoid setOrientation(glm::quat rotation);\n\n\t\t\tutil::Boundaries::AABB *getBounds();\n\n\t\t\tvirtual void tick(util::DeltaTime &deltaTime);\n\t\t\tvirtual void render(render::RenderManager &rManager);\n\t\t\tvoid renderDebug(render::RenderManager &rManager, bool renderPositionMarker, bool renderBounds);\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.6629404425621033, "alphanum_fraction": 0.6877278089523315, "avg_line_length": 29.481481552124023, "blob_id": "dfa19e4b7ed12961b46b64e697094b0ebd02a0ab", "content_id": "c59c2ba15fe459458c4716b8663762f4b6cce56c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4115, "license_type": "no_license", "max_line_length": 150, "num_lines": 135, "path": "/src/cpp/input/controls/PlayerGameControls.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/DeltaTime.hpp\"\n#include \"util/gl.h\"\n#include <glm/gtc/quaternion.hpp>\n#include \"world/Entity.hpp\"\n#include <iostream>\n#include \"util/AssetManager.hpp\"\n#include \"util/Boundaries.hpp\"\n\n#include \"PlayerGameControls.hpp\"\n\nusing namespace world;\nusing namespace controls;\nusing namespace collisions;\nusing namespace util::Boundaries;\n\nSphere *playerBounds;\n\nPlayerGameControls::PlayerGameControls(Entity *controlEntity, StaticMesh *world) : ControlScheme(controlEntity)\n{\n\tthis->lastCursorX = -1000;\n\tthis->lastCursorY = -1000;\n\tthis->cursorX = 0;\n\tthis->cursorY = 
0;\n\tthis->cursorDeltaX = 0;\n\tthis->cursorDeltaY = 0;\n\tthis->lastCursor = false;\n\tthis->world = world;\n\t//playerBounds = new AABB(0,0,0, 0.2f,0.2f,0.2f);\n\tplayerBounds = new Sphere;\n\tplayerBounds->radius = 0.2f;\n}\nPlayerGameControls::~PlayerGameControls()\n{\n\t\n}\nvoid PlayerGameControls::tick2(render::RenderManager *rManager, util::DeltaTime &deltaTime)\n{\n\tfloat r;\n\tglm::vec3 movement = glm::vec3(0,0,0);\n\tglm::quat orientation = this->controlEntity->getOrientation();\n\n\t// Mouse controls\n\tGLFWwindow *window = glfwGetCurrentContext();\n\tglfwGetCursorPos(window, &this->cursorX, &this->cursorY);\n\tif(!this->lastCursor)\n\t{\n\t\tthis->lastCursor = true;\n\t}\n\telse\n\t{\n\t\tr = deltaTime.getTimeDelta()*2.f;\n\t\tthis->cursorDeltaX = (this->cursorX-this->lastCursorX)*r;\n\t\tthis->cursorDeltaY = (this->cursorY-this->lastCursorY)*r;\n\t\tif(this->cursorDeltaX!=0 || this->cursorDeltaY!=0)\n\t\t{\n\t\t\tglm::quat q = glm::quat(glm::vec3(0, 0, glm::radians(-this->cursorDeltaX)));\n\t\t\tglm::quat preq = glm::quat(glm::vec3(glm::radians(-this->cursorDeltaY), 0, 0));\n\t\t\torientation = q*orientation*preq;\n\t\t}\n\t}\n\tthis->lastCursorX = this->cursorX;\n\tthis->lastCursorY = this->cursorY;\n\n\t// Orientation changes\n\tr = 2*deltaTime.getTimeDelta();\n\n\t// Relative Roll\n\tif(glfwGetKey(window, GLFW_KEY_1)==GLFW_PRESS)\n\t\torientation = glm::angleAxis(r, glm::vec3( 0, 0,-1))*orientation;\n\tif(glfwGetKey(window, GLFW_KEY_3)==GLFW_PRESS)\n\t\torientation = glm::angleAxis(r, glm::vec3( 0, 0, 1))*orientation;\n\n\t// Relative Pitch\n\tif(glfwGetKey(window, GLFW_KEY_R)==GLFW_PRESS)\n\t\torientation = glm::angleAxis(r, glm::vec3(-1, 0, 0))*orientation;\n\tif(glfwGetKey(window, GLFW_KEY_V)==GLFW_PRESS)\n\t\torientation = glm::angleAxis(r, glm::vec3( 1, 0, 0))*orientation;\n\n\t// Absolute Yaw\n\tif(glfwGetKey(window, GLFW_KEY_Z)==GLFW_PRESS)\n\t\torientation *= glm::angleAxis(r, glm::vec3( 0, 1, 0));\n\tif(glfwGetKey(window, GLFW_KEY_C)==GLFW_PRESS)\n\t\torientation *= glm::angleAxis(r, glm::vec3( 0,-1, 0));\n\n\t// Movement Controls\n\tif(glfwGetKey(window, GLFW_KEY_LEFT_SHIFT)==GLFW_PRESS)\n\t\tr = 4.f*deltaTime.getTimeDelta();\n\telse\n\t\tr = 1.f*deltaTime.getTimeDelta();\n\n\t// Forward/Backward\n\tif(glfwGetKey(window, GLFW_KEY_W)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( 0, r, 0);\n\tif(glfwGetKey(window, GLFW_KEY_S)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( 0, -r, 0);\n\n\t// Left/Right\n\tif(glfwGetKey(window, GLFW_KEY_A)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3(-r, 0, 0);\n\tif(glfwGetKey(window, GLFW_KEY_D)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( r, 0, 0);\n\n\t// Down/Up\n\tif(glfwGetKey(window, GLFW_KEY_Q)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( 0, 0,-r);\n\tif(glfwGetKey(window, GLFW_KEY_E)==GLFW_PRESS)\n\t\tmovement += orientation*glm::vec3( 0, 0, r);\n\n\t//glm::vec3 orientationEuler = glm::eulerAngles(orientation);\n\t//std::cout << glm::degrees(orientationEuler.x) << \", \" << glm::degrees(orientationEuler.y) << \", \" << glm::degrees(orientationEuler.z) << std::endl;\n\tthis->velocity += movement;\n\tthis->velocity *= 0.85;\n\tglm::vec3 a = this->controlEntity->getPosition();\n\n\tplayerBounds->center[0] = a.x;\n\tplayerBounds->center[1] = a.y;\n\tplayerBounds->center[2] = a.z;\n\n\n\tfloat f = 1.f;\n\tstd::vector<glm::vec3> normals;\n\tthis->world->collisionResponse(*playerBounds, &f, &velocity, &normals);\n\tglm::vec3 step = velocity*(f-0.001f);\n\n\tglm::vec3 v = 
velocity*(1.f-f);\n\tfor(auto &n : normals)\n\t{\n\t\tv -= (n*glm::dot(v, n));\n\t\tvelocity -= (n*glm::dot(velocity, n));\n\t}\n\tstep += v;\n\t\n\tthis->controlEntity->translate(step);\n\tthis->controlEntity->setOrientation(orientation);\n}\n" }, { "alpha_fraction": 0.7623318433761597, "alphanum_fraction": 0.7623318433761597, "avg_line_length": 14.928571701049805, "blob_id": "a83dba6a3c5d069c8d80e98d483e4dcbd90f0c69", "content_id": "989ce008de594a31c0f56bc5eb451fb642ff9e84", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 223, "license_type": "no_license", "max_line_length": 44, "num_lines": 14, "path": "/src/cpp/gui/elements/ControlsOption.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_ELEMENTS_CONTROLSOPTION_HPP_\n#define FYP_GUI_ELEMENTS_CONTROLSOPTION_HPP_\n\nclass ControlsOption;\n\n// Include dependencies\n\nclass ControlsOption {\n\tpublic:\n\t\tControlsOption();\n\t\t~ControlsOption();\n};\n\n#endif\n" }, { "alpha_fraction": 0.699999988079071, "alphanum_fraction": 0.699999988079071, "avg_line_length": 9, "blob_id": "872bbc38ab94762736d692927866f6504a918767", "content_id": "9f3be11bb1b4757c7c0d98de00e80719e3498d3f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 100, "license_type": "no_license", "max_line_length": 29, "num_lines": 10, "path": "/src/cpp/gui/screens/ControlsMenu.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"ControlsMenu.hpp\"\n\nControlsMenu::ControlsMenu()\n{\n\t\n}\nControlsMenu::~ControlsMenu()\n{\n\t\n}\n" }, { "alpha_fraction": 0.7488151788711548, "alphanum_fraction": 0.7488151788711548, "avg_line_length": 14.071428298950195, "blob_id": "9a0c973800903b6fda4d0cfd49e99fe538bd57da", "content_id": "8d73d3e9953ac85c90f995897d3184797a4ad3fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 211, "license_type": "no_license", "max_line_length": 42, "num_lines": 14, "path": "/src/cpp/gui/elements/PulldownMenu.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_GUI_ELEMENTS_PULLDOWNMENU_HPP_\n#define FYP_GUI_ELEMENTS_PULLDOWNMENU_HPP_\n\nclass PulldownMenu;\n\n// Include dependencies\n\nclass PulldownMenu {\n\tpublic:\n\t\tPulldownMenu();\n\t\t~PulldownMenu();\n};\n\n#endif\n" }, { "alpha_fraction": 0.7202380895614624, "alphanum_fraction": 0.7291666865348816, "avg_line_length": 32.12676239013672, "blob_id": "58f095092054b6efb14a18cedcb1a989152342cd", "content_id": "175860867d1791f5d4faf352530e30d291c2632e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2352, "license_type": "no_license", "max_line_length": 147, "num_lines": 71, "path": "/src/cpp/ai/path/PathExecuter.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"PathExecuter.hpp\"\n\nusing namespace world;\nusing namespace util;\n\nusing namespace ai::path;\n\nPathExecuter::PathExecuter(float movementSpeed, Entity *entity, NavigationGraph *navgraph)\n{\n\tthis->navgraph = navgraph;\n\tthis->currentPathNode = -1;\n\tthis->ent = entity;\n\tthis->currentPath.clear();\n\tthis->speed = movementSpeed;\n}\nPathExecuter::~PathExecuter()\n{\n\t\n}\nvoid PathExecuter::postPath(const std::vector<int> &path)\n{\n\tthis->currentPathNode = -1;\n\tthis->currentPath = path;\n\tthis->distances = new 
float[this->currentPath.size()-1];\n\tthis->timeToNextNode = new float[this->currentPath.size()-1];\n\tthis->nodeTime = new float[this->currentPath.size()];\n\tthis->nodeTime[0] = 0.f;\n\tfor(unsigned long i=0;i<this->currentPath.size()-1;i++)\n\t{\n\t\tthis->distances[i] = glm::distance(this->navgraph->nodes[this->currentPath[i]].position, this->navgraph->nodes[this->currentPath[i+1]].position);\n\t\tthis->timeToNextNode[i] = this->distances[i]/this->speed;\n\t\tthis->nodeTime[i+1] = this->nodeTime[i]+this->timeToNextNode[i];\n\t}\n}\nvoid PathExecuter::tick(DeltaTime &deltaTime)\n{\n\tif(this->currentPath.size()==0)\n\t\treturn;\n\n\tif(this->currentPathNode==-1)\n\t{\n\t\tthis->currentPathNode = 0;\n\t\tthis->pathStartTime = deltaTime.getTime();\n\t}\n\tdouble timeIntoPath;\n\ttimeIntoPath = deltaTime.getTime()-this->pathStartTime;\n\tdouble nextNodeTime;\n\tnextNodeTime = this->nodeTime[this->currentPathNode+1];\n\twhile(timeIntoPath>nextNodeTime)//If we've reached or gone past the current node\n\t{\n\t\tthis->currentPathNode++;\n\t\tif((unsigned long)this->currentPathNode+1>=this->currentPath.size())\n\t\t{\n\t\t\t//this->currentPathNode = 0;\n\t\t\t//this->pathStartTime = deltaTime.getTime();\n\t\t\t//timeIntoPath = deltaTime.getTime()-this->pathStartTime;\n\t\t\tthis->currentPath.clear();\n\t\t\tthis->currentPathNode = -1;\n\t\t\treturn;\n\t\t}\n\t\tnextNodeTime = this->nodeTime[this->currentPathNode+1];\n\t}\n\t\n\tdouble currentNodeTime = this->nodeTime[this->currentPathNode];\n\tdouble nextNodeTimeDelta = this->timeToNextNode[this->currentPathNode];\n\tdouble currentTimeBetweenNodes = timeIntoPath-currentNodeTime;\n\tglm::vec3 a = this->navgraph->nodes[this->currentPath[this->currentPathNode]].position;\n\tglm::vec3 b = this->navgraph->nodes[this->currentPath[this->currentPathNode+1]].position;\n\tthis->ent->setPosition(a+(b-a)*(float)(currentTimeBetweenNodes/nextNodeTimeDelta));\n\t//this->currentPathNode++;\n}\n" }, { "alpha_fraction": 0.6528497338294983, "alphanum_fraction": 0.6580311059951782, "avg_line_length": 18.299999237060547, "blob_id": "eb6a141ba2fae42f148c3696fa0f067330f8113c", "content_id": "79a017c01948f85d12acb8db9108c7ad255910e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 193, "license_type": "no_license", "max_line_length": 46, "num_lines": 10, "path": "/src/cpp/util/Globals.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"Globals.hpp\"\n\nusing namespace util;\n\nvoid Globals::fatalError(std::string errorMsg)\n{\n\tstd::cerr << \"FATAL ERROR!\" << std::endl;\n\tstd::cerr << errorMsg << std::endl;\n\tstd::exit(1);\n}\n" }, { "alpha_fraction": 0.6023339033126831, "alphanum_fraction": 0.6032316088676453, "avg_line_length": 28.3157901763916, "blob_id": "734287155a9163b70fc05afa8bebf0f07c5e3bb1", "content_id": "976c1667de6509e60a5d784eb9ad52c96d3e9283", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1114, "license_type": "no_license", "max_line_length": 132, "num_lines": 38, "path": "/src/cpp/util/SuperDebug.h", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef SUPERDEBUG_H_INCLUDED\n#define SUPERDEBUG_H_INCLUDED\n#ifndef DISABLE_DEBUG\n\n#include <iostream>\n#include <glm/glm.hpp>\n#include <glm/gtc/quaternion.hpp>\n#include \"util/gl.h\"\n\nextern std::ostream *debugfile;\n\nextern GLFWwindow *superdebug_window;\ninline bool DEBUG_KEY(int key)\n{\n\treturn 
glfwGetKey(superdebug_window, key)==GLFW_PRESS;\n}\n\n#define THREE_IOSTREAM_HEADER \"[\" << __FILE__ << \":\" << __LINE__ << \" \" << __func__ << \"()] \"\n#define PRINT_CONTROL(message, control, action) PRINT_DEBUG(message << \" (Control:\" << control << \", Action:\" << action << \")\")\n\n#define PRINT_DEBUG(msg) std::cout << THREE_IOSTREAM_HEADER << msg << std::endl;\n\ninline std::ostream &operator<<(std::ostream &ost, const glm::vec3 &a)\n{\n\treturn ost << \"(\" << a.x << \", \" << a.y << \", \" << a.z << \")\";\n}\n\ninline std::ostream &operator<<(std::ostream &ost, const glm::quat &a)\n{\n#ifdef DEBUG_QUAT\n\treturn ost << \"(Q:\" << a.x << \", \" << a.y << \", \" << a.z << \" : \" << a.w << \"; Euler:\" << glm::degrees(glm::eulerAngles(a)) << \")\";\n#else\n\treturn ost << \"(Euler:\" << glm::degrees(glm::eulerAngles(a)) << \")\";\n#endif\n}\n\n#endif\n#endif\n" }, { "alpha_fraction": 0.5595695376396179, "alphanum_fraction": 0.5649500489234924, "avg_line_length": 31.524999618530273, "blob_id": "af014cfad7224979ddd88ee9a164c9e76fb5a75e", "content_id": "5a9d463a3a855c65ab6d610f45f4adadfd7d03cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1301, "license_type": "no_license", "max_line_length": 79, "num_lines": 40, "path": "/hooks/fix_include_guards.py", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport os,re\n\ndef collapseFolders(folders):\n ap = \"\"\n for f in folders:\n ap += \"/\"+f\n return ap\n\ndef collapseFoldersMacro(folders, e):\n ap = \"\"\n for f in folders:\n ap += \"_\"+f\n return ap+\"_\"+e.replace(\".\", \"_\")+\"_\"\n\ndef mkdirs(rootDir, folders):\n os.makedirs(rootDir+collapseFolders(folders), exist_ok=True)\n\ndef processHeaderFile(rootDir, folders, e):\n with open(rootDir+collapseFolders(folders)+\"/\"+e, \"r\") as fp:\n a = fp.readlines();\n if re.search(\"^#ifndef .*$\", a[0]) and re.search(\"^#define .*$\", a[1]):\n print(\"Fixing\", rootDir+collapseFolders(folders)+\"/\"+e)\n macro = (\"FYP\"+collapseFoldersMacro(folders,e)).upper()\n a[0] = \"#ifndef \"+macro+\"\\n\"\n a[1] = \"#define \"+macro+\"\\n\"\n #mkdirs(\"./test\", folders)\n with open(rootDir+collapseFolders(folders)+\"/\"+e, \"w\") as fp2:\n fp2.writelines(a)\n\ndef processDir(rootDir, folders):\n for e in os.listdir(rootDir+collapseFolders(folders)):\n if os.path.isdir(rootDir+collapseFolders(folders)+\"/\"+e):\n processDir(rootDir, folders+[e])\n if re.search(\"^.*\\.hpp$\", e):\n processHeaderFile(rootDir, folders, e)\n\nif __name__==\"__main__\":\n processDir(\"./src\", []);\n" }, { "alpha_fraction": 0.5490797758102417, "alphanum_fraction": 0.5593047142028809, "avg_line_length": 29.310077667236328, "blob_id": "fe327f928494448f2995f4415023cca4907b3031", "content_id": "b6c63883922690d41acfb41602a8c3e1bec60240", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3912, "license_type": "no_license", "max_line_length": 127, "num_lines": 129, "path": "/hooks/asset_common.py", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport gzip,re,struct,sys,os\n\nREGEX_INT = \"-?\\d+\"\nREGEX_FLOAT = \"-?\\d*\\.{0,1}\\d+\"\nREGEX_INT_CLASS = \"(\"+REGEX_INT+\")\"\nREGEX_FLOAT_CLASS = \"(\"+REGEX_FLOAT+\")\"\n\ndef exitFailure(msg):\n print(msg, file=stderr)\n sys.exit(1)\n\ndef open_sourcefile(source_filename):\n return open(source_filename, \"r\")\n\ndef close_sourcefile(source_fp):\n 
source_fp.close()\n\ndef open_objectfile(source_filename):\n return gzip.open(source_filename, \"wb\")\n\ndef close_objectfile(object_fp):\n object_fp.close()\n\ndef getFilePath(filepath):\n return filepath[0:filepath.rfind(\"/\")]\n\ndef getFileExtension(filename):\n return filename[filename.find(\".\")+1:]\n\ndef getFileName(filename):\n return filename[filename.rfind(\"/\")+1:filename.find(\".\")]\n\ndef getMetadata(metafile):\n assetId = 0\n materials = {}\n textures = {}\n with open(metafile, \"r\") as fp:\n for line in fp:\n line = line[:-1]\n\n if line.find(\"// addMTLMaterial\")>=0:\n m = re.search(\"// addMTLMaterial\\(\"+REGEX_INT_CLASS+\",\"+REGEX_INT_CLASS+\",(.+),(.+)\\)\", line)\n if m:\n mtlAssetID = int(m.group(1))\n materialId = int(m.group(2))\n mtlPath = m.group(3)\n materialName = m.group(4)\n materials[mtlPath+\":\"+materialName] = (mtlAssetID, materialId)\n else:\n exitFailure(\"addMTLMaterial loading failed!\\n\"+line)\n\n if line.find(\"// addTexture\")>=0:\n m = re.search(\"// addTexture\\(\"+REGEX_INT_CLASS+\",(.+)\\)\", line)\n if m:\n textureAssetId = int(m.group(1))\n texturePath = m.group(2)\n textures[texturePath] = textureAssetId\n else:\n exitFailure(\"addTexture loading failed!\\n\"+line)\n\n a = {'materials':materials, 'textures':textures}\n return a\n\ndef writeType(fp, thetype):\n tt = type(thetype)\n if tt==int:\n fp.write(struct.pack(\"i\", thetype))\n elif tt==float:\n fp.write(struct.pack(\"f\", thetype))\n elif tt==bool:\n fp.write(struct.pack(\"?\", thetype))\n elif tt==list or tt==tuple:\n for e in thetype:\n if not writeType(fp, e):\n return False\n elif tt==str:\n writeType(fp, len(thetype))\n fp.write(bytes(thetype, 'UTF-8'))\n else:\n print(\"Unknown type\", tt)\n return False\n return True\n\ndef parse1i(current, line, key):\n m = re.search(\"^\"+key+\"\\s\"+REGEX_INT_CLASS+\"$\", line);\n if not m:\n return current\n return int(m.group(1))\n\ndef parse2i(current, line, key):\n m = re.search(\"^\"+key+\"\\s\"+REGEX_INT_CLASS+\"\\s\"+REGEX_INT_CLASS+\"$\", line);\n if not m:\n return current\n return (int(m.group(1)), int(m.group(2)))\n\ndef parse1s(current, line, key):\n m = re.search(\"^\"+key+\"\\s+(.+?)$\", line);\n if not m:\n return current\n return m.group(1)\n\ndef parse1f(current, line, key):\n m = re.search(\"^\"+key+\"\\s+\"+REGEX_FLOAT_CLASS+\".*$\", line);\n if not m:\n return current\n return float(m.group(1))\n\ndef parse2f(current, line, key):\n m = re.search(\"^\"+key+\"\\s\"+REGEX_FLOAT_CLASS+\"\\s\"+REGEX_FLOAT_CLASS+\"$\", line);\n if not m:\n return current\n return (float(m.group(1)), float(m.group(2)))\n\ndef parse3f(current, line, key):\n m = re.search(\"^\"+key+\"\\s\"+REGEX_FLOAT_CLASS+\"\\s\"+REGEX_FLOAT_CLASS+\"\\s\"+REGEX_FLOAT_CLASS+\"$\", line);\n if not m:\n return current\n return (float(m.group(1)), float(m.group(2)), float(m.group(3)))\n\ndef parse1i3f(current, line, key):\n m = re.search(\"^\"+key+\"\\s\"+REGEX_INT_CLASS+\"\\s\"+REGEX_FLOAT_CLASS+\"\\s\"+REGEX_FLOAT_CLASS+\"\\s\"+REGEX_FLOAT_CLASS+\"$\", line);\n if not m:\n return current\n return (int(m.group(1)), float(m.group(2)), float(m.group(3)), float(m.group(4)))\n\nif __name__==\"__main__\":\n pass\n\n\n" }, { "alpha_fraction": 0.6308051943778992, "alphanum_fraction": 0.6555295586585999, "avg_line_length": 24.80172348022461, "blob_id": "1d4de194ce518eefdab9c5d54f96c1627da93636", "content_id": "bc610ff71bda94797f8bddb4b8f7e0145d118d79", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2993, 
"license_type": "no_license", "max_line_length": 104, "num_lines": 116, "path": "/src/cpp/ai/path/DebugPathHolder.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/gl.h\"\n#include \"render/shaders/ShaderUtils.hpp\"\n#include \"render/BasicShapes.hpp\"\n#include <glm/gtc/matrix_transform.hpp>\n#include \"render/RenderManager.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include <set>\n\n#include \"ai/path/DebugPathHolder.hpp\"\n\nusing namespace render;\nusing namespace ai::path;\n\ninline struct PathNode *getNode(DebugPathHolder *h, int x, int y)\n{\n\tunsigned long i = x*50+y;\n\tif(x>=50 || y>=50 || i>=h->nodes.size())\n\t\treturn 0;\n\treturn h->nodes.at(i);\n}\ninline void linkNodes(struct PathNode *a, struct PathNode *b)\n{\n\tif(a==0 || b==0)\n\t\treturn;\n\tstruct PathNodeLink *pnLink = new struct PathNodeLink;\n\tpnLink->a = a;\n\tpnLink->b = b;\n\tpnLink->dist = glm::distance(a->position, b->position);\n\ta->links.push_back(pnLink);\n\tb->links.push_back(pnLink);\n}\n\nDebugPathHolder::DebugPathHolder()\n{\n\tint id = 0;\n\t// Create node grid\n\tfor(int x=0;x<50;x++)\n\t{\n\t\tfor(int y=0;y<50;y++)\n\t\t{\n\t\t\tstruct PathNode *node = new struct PathNode;\n\t\t\tnode->id = ++id;\n\t\t\tnode->position = glm::vec3(x*5, y*5, 0);\n\t\t\tthis->nodes.push_back(node);\n\t\t}\n\t}\n\t// Create node links\n\tfor(int x=0;x<50;x++)\n\t{\n\t\tfor(int y=0;y<50;y++)\n\t\t{\n\t\t\tif(x>20 && x<30 && y>20 && y<30)\n\t\t\t\tcontinue;\n\t\t\tstruct PathNode *a,*b;\n\t\t\ta = getNode(this, x, y);\n\t\t\tb = getNode(this, x+1, y);\n\t\t\tlinkNodes(a,b);\n\t\t\t\n\t\t\tb = getNode(this, x, y+1);\n\t\t\tlinkNodes(a,b);\n\t\t}\n\t}\n}\nDebugPathHolder::~DebugPathHolder()\n{\n\tstd::set<struct PathNodeLink *> links;\n\tfor(auto &n : this->nodes)\n\t{\n\t\tfor(auto &nLink : n->links)\n\t\t\tlinks.insert(nLink);\n\t}\n\tfor(auto &n : links)\n\t\tdelete n;\n\tfor(auto &n : this->nodes)\n\t{\n\t\tdelete n;\n\t}\n}\nvoid DebugPathHolder::render(util::DeltaTime &deltaTime, render::RenderManager &rManager)\n{\n\tglEnable(GL_BLEND);\n\tshaders::ShaderProgram *shader = shaders::ShaderProgram::getShader(SHADER_solidColor);\n\tGLint vploc = shader->getShaderLocation(false, SHADERVAR_vertex_position);\n\tGLint loc = shader->getShaderLocation(false, SHADER_solidColor_solidColor);\n\tfor(std::vector<struct PathNode *>::iterator it = this->nodes.begin(); it != this->nodes.end(); it++)\n\t{\n\t\tstruct PathNode *node = *it;\n\t\t// Draw all node links\n\t\tfor(std::vector<struct PathNodeLink *>::iterator j = node->links.begin(); j != node->links.end(); j++)\n\t\t{\n\t\t\tstruct PathNodeLink *nodeLnk = *j;\n\t\t\trManager.M = glm::mat4(1.0f);\n\t\t\trManager.markMDirty();\n\t\t\trManager.setShaderMatricies(*shader);\n\t\t\tglUniform4f(loc, 0.0f, 0.f, 0.4f, 1.f);\n\t\t\tBasicShapes::drawLine(nodeLnk->a->position, nodeLnk->b->position, vploc);\n\t\t}\n\t\t// Draw a point for the node\n\t\trManager.useShader(SHADER_solidColor);\n\t\trManager.M = glm::translate(glm::mat4(1.0f), node->position);\n\t\trManager.markMDirty();\n\t\trManager.setShaderMatricies(*shader);\n\t\tif(node->current)\n\t\t\tglUniform4f(loc, 1.0f, 0.f, 0.f, 1.f);\n\t\telse if(node->closed)\n\t\t\tglUniform4f(loc, 1.0f, 1.f, 0.f, 1.f);\n\t\telse if(node->open)\n\t\t\tglUniform4f(loc, 0.0f, 1.f, 0.f, 1.f);\n\t\telse\n\t\t\tglUniform4f(loc, 0.0f, 0.f, 1.f, 1.f);\n\n\t\tBasicShapes::drawPoint(3,vploc);\n\t\t\n\t}\n\tglDisable(GL_BLEND);\n}\n" }, { "alpha_fraction": 0.6254125237464905, "alphanum_fraction": 
0.6386138796806335, "avg_line_length": 25.34782600402832, "blob_id": "e0533e345b6cbdb4b06f8dd864cddd3ff60db271", "content_id": "c125411e7ab86cd864e57ce41ec7f4e4e0723f92", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 606, "license_type": "no_license", "max_line_length": 179, "num_lines": 23, "path": "/runloop.sh", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env zsh\n\nbinary=MuseumGuard\n\nif [ $# -gt 0 ]; then\n\trunloop_target=$1\nfi\n\ntmux rename-window \"Run\"\n\nwhile true; do\n\tclear\n\tmake -j 8\n\tMAKE_RC=$?\n\tif [ -n \"$runloop_target\" ]; then\n\t\t[ $MAKE_RC -eq 0 ] && rsync assets.gz $binary charles@sandybridge:~/in/ && ssh -t \"$runloop_target\" 'cd ~/in; DISPLAY=:0.0 gdb -q --eval-command=run --eval-command=quit $binary'\n\telif [ \"$runloop_target\" = \"primusrun\" ]; then\n\t\t[ $MAKE_RC -eq 0 ] && primusrun gdb -q --eval-command=run --eval-command=quit $binary\n\telse\n\t\t[ $MAKE_RC -eq 0 ] && gdb -q --eval-command=run --eval-command=quit $binary\n\tfi\n\tread\ndone\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 8, "blob_id": "b18ebe1a82665293c2bf38b0111fb8b730df7272", "content_id": "3e25d17729473d6cfced540c44a90dfe0b5f52be", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 90, "license_type": "no_license", "max_line_length": 25, "num_lines": 10, "path": "/src/cpp/ai/path/PathHelper.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"PathHelper.hpp\"\n\nPathHelper::PathHelper()\n{\n\t\n}\nPathHelper::~PathHelper()\n{\n\t\n}\n" }, { "alpha_fraction": 0.5979899764060974, "alphanum_fraction": 0.6582914590835571, "avg_line_length": 17.090909957885742, "blob_id": "63d663304e2b2a578743f46e4261d7848ec3b1ce", "content_id": "83f231b675a768d0575f99d6bd634580e7ddd990", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 199, "license_type": "no_license", "max_line_length": 56, "num_lines": 11, "path": "/src/cpp/render/shaders/code/UVTest_fragment.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//fragment\n\nin vec2 UV;\nimport uniform sampler2D material_map_Kd;\n\nout vec4 color;\n\nvoid main() {\n\tcolor = vec4(texture( material_map_Kd, UV ).rgb, 1.0f);\n\t//color = vec4(vec3(UV.rg, 0.0f), 1.0f);\n}\n" }, { "alpha_fraction": 0.6854663491249084, "alphanum_fraction": 0.7158351540565491, "avg_line_length": 27.8125, "blob_id": "0ac74c2cb206ad1736815fffe419b0825a1d2a4b", "content_id": "273844e8b85c63d454495821f38571856eaad1c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 461, "license_type": "no_license", "max_line_length": 112, "num_lines": 16, "path": "/src/cpp/util/QuaternionUtils.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_UTIL_QUATERNIONUTILS_HPP_\n#define FYP_UTIL_QUATERNIONUTILS_HPP_\n\n#include <glm/glm.hpp>\n#include \"util/gl.h\"\n\nnamespace util {\n\tnamespace QuaternionUtils {\n\t\tglm::quat rotationBetweenVectors(const glm::vec3 &u0, const glm::vec3 &u1);\n\t\tglm::quat rotationBetween(const glm::vec3 &u0, const glm::vec3 &v0, const glm::vec3 &u1, const glm::vec3 &v1);\n\t\tglm::vec3 rotate(glm::vec3 v, glm::quat q);\n\t\tvoid calculateQuaternionW(glm::quat &q);\n\t}\n}\n\n#endif\n" }, { "alpha_fraction": 0.699999988079071, "alphanum_fraction": 
0.699999988079071, "avg_line_length": 9, "blob_id": "0ec2610f78e62ad8eaf873aa97557d6ffd197ee9", "content_id": "d5d58a49a3c9fa34737f312e50710124f6f7a319", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 100, "license_type": "no_license", "max_line_length": 29, "num_lines": 10, "path": "/src/cpp/gui/screens/GraphicsMenu.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"GraphicsMenu.hpp\"\n\nGraphicsMenu::GraphicsMenu()\n{\n\t\n}\nGraphicsMenu::~GraphicsMenu()\n{\n\t\n}\n" }, { "alpha_fraction": 0.6712328791618347, "alphanum_fraction": 0.698630154132843, "avg_line_length": 15.84615421295166, "blob_id": "c622cc848a19005db6fc0ab7abd74e2aa3e99bd5", "content_id": "8b3b14ba1bdd635c9ea5d057133fa45c7e9876f9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 219, "license_type": "no_license", "max_line_length": 56, "num_lines": 13, "path": "/src/cpp/render/shaders/code/UVTest_vertex.c", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "//vertex\n\nimport in vec3 vertex_position;\nimport in vec2 vertex_texture;\n\nout vec2 UV;\n\nimport uniform mat4 matrix_MVP;\n\nvoid main() {\n gl_Position = matrix_MVP * vec4(vertex_position, 1);\n UV = vertex_texture;\n}\n" }, { "alpha_fraction": 0.6566163897514343, "alphanum_fraction": 0.6566163897514343, "avg_line_length": 26.136363983154297, "blob_id": "1ad5037584c8164149f2cfefc3d879312c803dde", "content_id": "417f55ead741dc6784eca19ef79fb634360970b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 597, "license_type": "no_license", "max_line_length": 91, "num_lines": 22, "path": "/src/cpp/util/Console.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <locale>\n\n#include \"Console.hpp\"\n\nvoid Console::println(int index, std::wstring text)\n{\n\tstd::wstring_convert<convert_type, wchar_t> converter;\n\tstd::cout << \"[\" << std::to_string(index) << \"]\" << converter.to_bytes(text) << std::endl;\n}\nvoid Console::print(int index, std::wstring text)\n{\n\tstd::wstring_convert<convert_type, wchar_t> converter;\n\tstd::cout << converter.to_bytes(text);\n}\nvoid Console::println(int index, std::string text)\n{\n\tstd::cout << \"[\" << std::to_string(index) << \"]\" << text << std::endl;\n}\nvoid Console::print(int index, std::string text)\n{\n\tstd::cout << text;\n}\n" }, { "alpha_fraction": 0.7352941036224365, "alphanum_fraction": 0.737889289855957, "avg_line_length": 29.421052932739258, "blob_id": "831e5fb48a5674c47b1531f9d8306f630491a283", "content_id": "7aee1080a3bdfab09b617d9a23aefd4273b69cb3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1156, "license_type": "no_license", "max_line_length": 139, "num_lines": 38, "path": "/src/cpp/ai/PerceptionManager.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef FYP_AI_PERCEPTIONMANAGER_HPP_\n#define FYP_AI_PERCEPTIONMANAGER_HPP_\n\n#include \"world/Entity.hpp\"\n#include \"util/DeltaTime.hpp\"\n#include <set>\n\n/*\n * Enemy: Turret, Player\n * Turret: Enemy\n * SecurityCamera: Enemy\n */\n\nnamespace ai {\n\tclass PerceptionManager;\n\tclass PerceptionManager {\n\t\tprotected:\n\t\t\t//current = the current orientation, target = the target orientation, min/max = boundaries; all in model space\n\t\t\tfloat 
currentYaw,currentPitch,targetYaw,targetPitch,maxYaw,minYaw,targetDistance;\n\t\t\tworld::Entity *targetEntity,*perceivedEntity;\n\t\t\tglm::vec3 offset_modelSpace;\n\t\t\tworld::Entity *controlEntity;\n\t\t\tstd::set<const std::type_info *> searchTypes;\n\t\tpublic:\n\t\t\tPerceptionManager(world::Entity *controlEntity, std::initializer_list<const std::type_info *> searchTypes, glm::vec3 offset_modelSpace);\n\t\t\t~PerceptionManager();\n\t\t\tvoid setYawBounds(float minYaw, float maxYaw);\n\t\t\tvoid tick(util::DeltaTime &deltaTime);\n\t\t\tworld::Entity *getOriginEntity();\n\t\t\tworld::Entity *getTargetEntity();\n\t\t\tworld::Entity *getPerceivedEntity();\n\t\t\tglm::vec3 getEyePosition();\n\t\t\tfloat getTargetDistance();\n\t\t\tglm::quat getOrientation();\n\t};\n}\n\n#endif\n" }, { "alpha_fraction": 0.7231082320213318, "alphanum_fraction": 0.7353240847587585, "avg_line_length": 26.542055130004883, "blob_id": "1464a4ba30a7e3303c7c0bcb052211839fd896a4", "content_id": "3e2a6d8f39aaaf5daa3cd70e52143049299de876", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2947, "license_type": "no_license", "max_line_length": 114, "num_lines": 107, "path": "/tmp/SkeletalModel.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#ifndef __SKELETALMODEL_H_INCLUDED__\n#define __SKELETALMODEL_H_INCLUDED__\n\n#define ENABLE_DEBUG_RENDER_MD5JOINT\n\n#include <vector>\n\nnamespace render{\n\tclass SkeletalModel;\n\ttypedef struct MD5Joint MD5Joint;\n\ttypedef struct MD5Bone MD5Bone;\n\ttypedef struct MD5Vertex MD5Vertex;\n\ttypedef struct MD5Primitive MD5Primitive;\n\ttypedef struct MD5Weight MD5Weight;\n\ttypedef struct MD5Mesh MD5Mesh;\n\ttypedef std::vector<MD5Bone> Skeleton;\n}\n\n#include <string>\n#include <fstream>\n#include <list>\n#include <glm/glm.hpp>\n#include <glm/gtc/quaternion.hpp>\n#include \"util/AssetManager.hpp\"\n#include \"render/MaterialLibrary.hpp\"\n#ifdef ENABLE_DEBUG_RENDER_MD5JOINT\n#include \"render/RenderManager.hpp\"\n#endif\n\nstd::ostream &operator<<(std::ostream &ost, const render::SkeletalModel &model);\nstd::ostream &operator<<(std::ostream &ost, const render::MD5Joint &joint);\nstd::ostream &operator<<(std::ostream &ost, const render::MD5Mesh &mesh);\nstd::ostream &operator<<(std::ostream &ost, const render::MD5Vertex &vert);\nstd::ostream &operator<<(std::ostream &ost, const render::MD5Weight &weight);\n\nnamespace render {\n\n\tstruct MD5Joint {\n\t\tint index;\n\t\tstd::string name;\n\t\tint parent;\n\t};\n\n\tstruct MD5Bone {\n\t\tglm::vec3 pos;\n\t\tglm::quat ori;\n\t};\n\n\tstruct MD5Vertex {\n\t\tint index;\n\t\tint startWeight;\n\t\tint countWeight;\n\t};\n\n\tstruct MD5Weight {\n\t\tint index;\n\t\tint joint;\n\t\tfloat bias;\n\t\tglm::vec3 pos;\n\t};\n\n\tstruct MD5Mesh {\n\t\tint mtlAssetId;\n\t\tint materialId;\n\t\tstd::vector<MD5Vertex> verts;\n\t\tstd::vector<GLfloat> textureUVs;\n\t\tstd::vector<GLuint> indecies;\n\t\tstd::vector<MD5Weight> weights;\n\t\tGLuint vertexBufferID,indexBufferID,vertexTextureBufferID;\n\t\tGLuint vertexColorBufferID;//debug\n\t};\n\n\tvoid calculateQuaternionW(glm::quat &q);\n\n\t//joints.append((name, parent, pos, ori))\n\t//meshes.append((shader_name, numverts, verts, numtris, tris, numweights, weights))\n\n\t//verts.append((vertIndex, tex, startWeight, countWeight))\n\t//tris.append((triIndex, vertIndex0, vertIndex1, vertIndex2))\n\t//weights.append((weightIndex, joint, bias, pos))\n\n\n\tclass SkeletalModel : public util::Asset 
{\n\t\tpublic:\n\t\t\tSkeletalModel(int assetId, std::istream &fp);\n\t\t\tvirtual ~SkeletalModel();\n\t\t\tvoid render();\n\t\t\tvirtual void write(std::ostream &ost) const;\n\t\t\tvirtual void postload();\n//#ifdef ENABLE_DEBUG_RENDER_MD5JOINT\n\t\t\tvoid renderSkeleton(render::RenderManager &manager, const Skeleton &skeleton);\n\t\t\tvoid renderWeights(render::RenderManager &manager, const Skeleton &skeleton);\n//#endif\n\t\t\tvoid render(render::RenderManager &manager);\n\t\t\tvoid render(render::RenderManager &manager, const Skeleton &skeleton);\n\t\t\tvoid render(render::RenderManager &manager, const Skeleton &skeleton, MD5Mesh &mesh, const Material *material);\n\t\t//private:\n\t\t\tstd::vector<MD5Joint> joints;\n\t\t\tstd::vector<MD5Bone> bindPoseSkeleton;\n\t\t\tstd::list<MD5Mesh> meshes;\n\t\t\tGLuint vertexArrayID;\n\t\tfriend std::ostream &::operator<<(std::ostream &ost, const render::SkeletalModel &model);\n\t};\n}\n\n\n#endif\n" }, { "alpha_fraction": 0.5143546462059021, "alphanum_fraction": 0.5208685398101807, "avg_line_length": 33.831932067871094, "blob_id": "e56c5eabedf8a4473f3b6f009b37bdf1099b1dfd", "content_id": "8f5b2f12cbda998a605b0812a8b6b727fd55e3a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4145, "license_type": "no_license", "max_line_length": 110, "num_lines": 119, "path": "/hooks/asset_convert.py", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport sys\n\nfrom asset_parsers import *\nfrom asset_common import *\nfrom asset_convert_image import convertImage\n\n\nif __name__==\"__main__\":\n global_meta_file = \"\"\n verbose = 0\n metaFile = \"\"\n srcFile = \"\"\n objFile = \"\"\n depFile = \"\"\n\n for i in range(len(sys.argv)):\n arg = sys.argv[i]\n if arg==\"--global-meta\":\n global_meta_file = sys.argv[i+1]\n elif arg==\"--meta\":\n metaFile = sys.argv[i+1]\n elif arg==\"--src\":\n srcFile = sys.argv[i+1]\n elif arg==\"--depend\":\n depFile = sys.argv[i+1]\n elif arg==\"--out\":\n objFile = sys.argv[i+1]\n elif arg==\"--verbose\" or arg==\"-v\":\n verbose = int(sys.argv[i+1])\n\n srcFileExt = getFileExtension(srcFile)\n meta = getMetadata(global_meta_file)\n srcFilePath = getFilePath(srcFile)\n srcFileName = srcFile[srcFile.rfind(\"/\")+1:]\n\n objFileP = open_objectfile(objFile)\n\n result = True\n\n try:\n\n if srcFileExt==\"png\" or srcFileExt==\"tga\" or srcFileExt==\"jpg\":\n objFileP.write(bytes([4]))\n result = writeType(objFileP, getFileName(srcFile))\n convertImage(srcFile, objFileP)\n else:\n if srcFileExt==\"mtl\":\n parseFunc = parseMTL\n if srcFileExt==\"obj\":\n parseFunc = parseOBJ\n if srcFileExt==\"md5mesh\":\n parseFunc = parseMD5Mesh\n if srcFileExt==\"md5anim\":\n parseFunc = parseMD5Anim\n if srcFileExt==\"nav.obj\":\n parseFunc = parseNAVOBJ\n\n srcFileP = open_sourcefile(srcFile)\n fileId,fileMeta,fileData,fileDepend = parseFunc(srcFilePath, srcFileName, srcFileP, meta, verbose)\n \n if fileDepend!=None and len(fileDepend)>0:\n depFileP = open(depFile, \"w\")\n depFileP.write(srcFile+\":\")\n for f in fileDepend:\n #print(f)\n depFileP.write(\" \"+f)\n depFileP.write(\"\\n\")\n for f in fileDepend:\n depFileP.write(f+\":\\n\")\n depFileP.close()\n if fileMeta!=None and len(fileMeta)>0:\n with open(metaFile, \"w\") as fp:\n for key in fileMeta[\"macros\"]:\n print(\"#define \"+key+\" \"+str(fileMeta[\"macros\"][key]), file=fp)\n\n objFileP.write(bytes([fileId]))\n writeType(objFileP, getFileName(srcFile))\n result = 
writeType(objFileP, fileData)\n\n\n #if ext==\"mtl\": parseFunc = parseMTL\n # mtl = parseMTL\n # object_fp.write(bytes([0]))\n # result = writeType(object_fp, mtl)\n #elif ext==\"obj\":\n # mtl = parseOBJ(filepath, filename, source_fp, meta, verbose)\n # object_fp.write(bytes([1]))\n # writeType(object_fp, getFileName(fileToConvert))\n # result = writeType(object_fp, mtl)\n #elif ext==\"md5mesh\":\n # mtl = parseMD5Mesh(filepath, filename, source_fp, meta, verbose)\n # object_fp.write(bytes([2]))\n # writeType(object_fp, getFileName(fileToConvert))\n # result = writeType(object_fp, mtl)\n #elif ext==\"md5anim\":\n # mtl = parseMD5Anim(filepath, filename, source_fp, meta, verbose)\n # object_fp.write(bytes([3]))\n # writeType(object_fp, getFileName(fileToConvert))\n # result = writeType(object_fp, mtl)\n ## 4: Image\n #elif ext==\"nav.obj\":\n # mtl = parseNAVOBJ(filepath, filename, source_fp, meta, verbose)\n # object_fp.write(bytes([5]))\n # writeType(object_fp, getFileName(fileToConvert))\n # result = writeType(object_fp, mtl)\n\n\n srcFileP.close()\n objFileP.close()\n except Exception as e:\n raise e\n print(\"Exception:\",e)\n result = False\n if result==False:\n print(\"Critical error!\")\n os.remove(fileToPlace)\n sys.exit(1)\n" }, { "alpha_fraction": 0.6672666072845459, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 22.643617630004883, "blob_id": "b217d53961565cbfb08228a8ff5a7e0a3456f72c", "content_id": "f5cc6d16de42137df6406039045c81768343db84", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4445, "license_type": "no_license", "max_line_length": 109, "num_lines": 188, "path": "/src/cpp/render/BasicShapes.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include <glm/gtc/matrix_transform.hpp>\n#include <iostream>\n\n#include \"BasicShapes.hpp\"\n\nusing namespace render;\n\nGLuint BasicShapes::unitMeshArrayID = 0;\nGLuint BasicShapes::unitMeshVertexPositionBufferID = 0;\nGLuint BasicShapes::unitSquareIndexBufferID = 0;\nGLuint BasicShapes::unitCubeIndexBufferID = 0;\nGLuint BasicShapes::unitCubeFrameIndexBufferID = 0;\nGLuint BasicShapes::lineVertexArrayID = 0;\nGLuint BasicShapes::lineVertexBufferID = 0;\n\nvoid BasicShapes::init()\n{\n\tglGenVertexArrays(1, &BasicShapes::unitMeshArrayID);\n\tglGenBuffers(1, &BasicShapes::unitMeshVertexPositionBufferID);\n\tglGenBuffers(1, &BasicShapes::unitSquareIndexBufferID);\n\tglGenBuffers(1, &BasicShapes::unitCubeIndexBufferID);\n\tglGenBuffers(1, &BasicShapes::unitCubeFrameIndexBufferID);\n\n\tglGenVertexArrays(1, &BasicShapes::lineVertexArrayID);\n\tglGenBuffers(1, &BasicShapes::lineVertexBufferID);\n\t\n\tGLubyte unitMesh[] = {\n\t\t// Unit square/Close face of Cube\n\t\t0,\t0,\t0,// 0\n\t\t0,\t1,\t0,// 1\n\t\t1,\t1,\t0,// 2\n\t\t1,\t0,\t0,// 3\n\t\t// Far face of Cube\n\t\t0,\t0,\t1,// 4\n\t\t0,\t1,\t1,// 5\n\t\t1,\t1,\t1,// 6\n\t\t1,\t0,\t1,// 7\n\t\t/*\n\t\t * 9 10\n\t\t * 8 11\n\t\t * | |\n\t\t * 5 6\n\t\t * 4 7\n\t\t *\n\t\t *\n\t\t */\n\t\t\n\t};\n\t\n\tglBindVertexArray(BasicShapes::unitMeshArrayID);\n\tglBindBuffer(GL_ARRAY_BUFFER, BasicShapes::unitMeshVertexPositionBufferID);\n\tglBufferData(GL_ARRAY_BUFFER, sizeof(unitMesh), unitMesh, GL_STATIC_DRAW);\n\t\n\tGLubyte unitSquareIndicies[] = {\n\t\t0,1,2,\n\t\t0,2,3\n\t};\n\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, BasicShapes::unitSquareIndexBufferID);\n\tglBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unitSquareIndicies), unitSquareIndicies, GL_STATIC_DRAW);\n\n\tGLubyte 
unitCubeFrameIndecies[] = {\n\t\t// Front\n\t\t0,1,\n\t\t0,3,\n\t\t2,1,\n\t\t2,3,\n\t\t// Back\n\t\t4,5,\n\t\t4,7,\n\t\t6,5,\n\t\t6,7,\n\t\t// Depth\n\t\t0,4,\n\t\t1,5,\n\t\t2,6,\n\t\t3,7\n\t};\n\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, BasicShapes::unitCubeFrameIndexBufferID);\n\tglBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unitCubeFrameIndecies), unitCubeFrameIndecies, GL_STATIC_DRAW);\n\t\n\tGLubyte unitCubeIndicies[] = {\n\t\t// Back\n\t\t7,6,5,\n\t\t7,5,4,\n\t\t// Right side\n\t\t3,2,6,\n\t\t3,6,7,\n\t\t// top\n\t\t1,5,6,\n\t\t1,6,2,\n\t\t// Closest face\n\t\t0,1,2,\n\t\t0,2,3,\n\t\t// Left side\n\t\t4,5,1,\n\t\t4,1,0,\n\t\t// bottom\n\t\t4,0,3,\n\t\t4,3,7,\n/*\n * 0\n * 1\n * 2\n * 3\n * 4 0\n * 5 1\n * 6 2\n * 7 3\n * 8 4\n * 9 5\n * 10 6\n * 11 7\n * /\n\n\n\t\t// Back\n\t\t11,10,9,\n\t\t11,9,8,\n\t\t// Right side\n\t\t7,6,10,\n\t\t7,10,11,\n\t\t// top\n\t\t5,9,10,\n\t\t5,10,6,\n\t\t// Closest face\n\t\t4,5,6,\n\t\t4,6,7,\n\t\t// Left side\n\t\t8,9,5,\n\t\t8,5,4,\n\t\t// bottom\n\t\t8,4,7,\n\t\t8,7,11,\n\t\t*/\n\t};\n\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, BasicShapes::unitCubeIndexBufferID);\n\tglBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unitCubeIndicies), unitCubeIndicies, GL_STATIC_DRAW);\n}\nvoid BasicShapes::renderUnitSquare(GLuint vertexPositionPointer)\n{\n\tglEnableVertexAttribArray(vertexPositionPointer);\n\tbindUnitMesh(vertexPositionPointer);\n\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, unitSquareIndexBufferID);\n\tglDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, (void *)0);\n}\nvoid BasicShapes::renderUnitCube(GLuint vertexPositionPointer)\n{\n\tglEnableVertexAttribArray(vertexPositionPointer);\n\tbindUnitMesh(vertexPositionPointer);\n\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, unitCubeIndexBufferID);\n\tglDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_BYTE, (void *)0);\n}\nvoid BasicShapes::renderUnitCubeFrame(GLuint vertexPositionPointer)\n{\n\tglEnableVertexAttribArray(vertexPositionPointer);\n\tbindUnitMesh(vertexPositionPointer);\n\tglBindBuffer(GL_ELEMENT_ARRAY_BUFFER, unitCubeFrameIndexBufferID);\n\tglDrawElements(GL_LINES, 24, GL_UNSIGNED_BYTE, (void *)0);\n}\nvoid BasicShapes::bindUnitMesh(GLuint vertexPositionPointer)\n{\n\tglBindVertexArray(unitMeshArrayID);\n\t\n\tglBindBuffer(GL_ARRAY_BUFFER, unitMeshVertexPositionBufferID);\n\tglVertexAttribPointer(vertexPositionPointer, 3, GL_UNSIGNED_BYTE, GL_FALSE, 0, 0);\n}\nvoid BasicShapes::drawLine(glm::vec3 start, glm::vec3 end, GLuint vertexPositionPointer)\n{\n\tGLfloat data[] = {\n\t\tstart.x, start.y, start.z,\n\t\tend.x, end.y, end.z\n\t};\n\tglBindVertexArray(BasicShapes::lineVertexArrayID);\n\n\tglBindBuffer(GL_ARRAY_BUFFER, BasicShapes::lineVertexBufferID);\n\tglBufferData(GL_ARRAY_BUFFER, sizeof(data), data, GL_DYNAMIC_DRAW);\n\n\tglEnableVertexAttribArray(vertexPositionPointer);\n\tglVertexAttribPointer(vertexPositionPointer, 3, GL_FLOAT, GL_FALSE, 0, 0);\n\n\tglDrawArrays(GL_LINES, 0, 2);\n}\nvoid BasicShapes::drawPoint(float size, GLuint vertexPositionPointer)\n{\n\tglPointSize(size);\n\tbindUnitMesh(vertexPositionPointer);\n\tglDrawArrays(GL_POINTS, 0, 1);\n}\n" }, { "alpha_fraction": 0.685724675655365, "alphanum_fraction": 0.6886380314826965, "avg_line_length": 22.47008514404297, "blob_id": "2d7d2ef83f455914691fe02e05f57dc0ab5f4240", "content_id": "0508cc5b75e848400962013848a09d2d06a7449e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2746, "license_type": "no_license", "max_line_length": 134, "num_lines": 117, "path": "/src/cpp/gui/ScreenManager.cpp", 
"repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"gui/Screen.hpp\"\n#include <glm/glm.hpp>\n#include \"input/Controls.hpp\"\n\n#include \"ScreenManager.hpp\"\n\nScreenManager::ScreenManager()\n{\n\tthis->width = 0;\n\tthis->height = 0;\n\tthis->lastCursorX = -1;\n\tthis->lastCursorY = -1;\n}\nScreenManager::~ScreenManager()\n{\n\twhile(!this->screens.empty())\n\t{\n\t\tScreen *screen = this->screens.front();\n\t\tdelete screen;\n\t\tthis->screens.pop_front();\n\t}\n}\nvoid ScreenManager::openRootScreen(Screen *screen)\n{\n\twhile(!this->screens.empty())\n\t{\n\t\tScreen *screen = this->screens.front();\n\t\tdelete screen;\n\t\tthis->screens.pop_front();\n\t}\n\tscreen->manager = this;\n\tthis->screens.push_back(screen);\n\tthis->onSurfaceScreenChanged(screen);\n}\nvoid ScreenManager::openScreen(Screen *screen)\n{\n\tscreen->manager = this;\n\tthis->screens.push_back(screen);\n\tthis->onSurfaceScreenChanged(screen);\n}\nvoid ScreenManager::close()\n{\n\n}\nbool ScreenManager::isScreenSurface(Screen *screen)\n{\n\treturn this->screens.back()==screen;\n}\nbool ScreenManager::isScreenBase(Screen *screen)\n{\n\treturn this->screens.front()==screen;\n}\nScreen *ScreenManager::closeScreen(Screen *screen)\n{\n\tif(this->screens.size()<=1)\n\t\treturn 0;\n\tScreen *s = this->screens.back();\n\tif(screen!=0 && s!=screen)\n\t\treturn 0;\n\tthis->screens.pop_back();\n\tthis->onSurfaceScreenChanged(this->screens.front());\n\treturn s;\n}\n#include <iostream>\n#include <typeinfo>\nbool ScreenManager::onControlEvent(int control, int action)\n{\n\t//std::cout << \"EVENT: control=\" << control << \", action=\" << action << \", mask=\" << (action&CONTROL_KEYACTION_RELEASE) << std::endl;\n\tfor(std::list<Screen *>::reverse_iterator it = this->screens.rbegin(); it!=this->screens.rend(); ++it)\n\t{\n\t\tScreen *screen = *it;\n\t\tif(screen->onControlEvent(control, action))\n\t\t{\n\t\t\treturn true;\n\t\t}\n\t}\n\treturn false;\n}\nbool ScreenManager::onControlEvent(int control, double x, double y, double dx, double dy)\n{\n\tfor(std::list<Screen *>::reverse_iterator it = this->screens.rbegin(); it!=this->screens.rend(); it++)\n\t{\n\t\tScreen *screen = *it;\n\t\tif(screen->onControlEvent(control, x, y, dx, dy))\n\t\t\treturn true;\n\t}\n\tthis->lastCursorX = x;\n\tthis->lastCursorY = y;\n\treturn false;\n}\nvoid ScreenManager::onScreenResize()\n{\n\tfor(std::list<Screen *>::iterator it = this->screens.begin(); it!=this->screens.end(); ++it)\n\t{\n\t\tScreen *screen = *it;\n\t\tscreen->onScreenResize();\n\t}\n}\nvoid ScreenManager::onSurfaceScreenChanged(Screen *screen)\n{\n}\nvoid ScreenManager::render(util::DeltaTime &deltaTime, render::RenderManager &manager)\n{\n\tfor(std::list<Screen *>::iterator it = this->screens.begin(); it!=this->screens.end(); ++it)\n\t{\n\t\tScreen *screen = *it;\n\t\tscreen->render(deltaTime, manager);\n\t}\n}\ndouble ScreenManager::getWidth()\n{\n\treturn this->width;\n}\ndouble ScreenManager::getHeight()\n{\n\treturn this->height;\n}\n" }, { "alpha_fraction": 0.724465548992157, "alphanum_fraction": 0.7292161583900452, "avg_line_length": 21.157894134521484, "blob_id": "d37e2ebd6091257c8c31948d7ab38d3cdd45dadc", "content_id": "c04ee63e12f24847d89118e2bb01bd975d5058b1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 421, "license_type": "no_license", "max_line_length": 52, "num_lines": 19, "path": "/src/cpp/util/Console.hpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": 
"#ifndef FYP_UTIL_CONSOLE_HPP_\n#define FYP_UTIL_CONSOLE_HPP_\n\n#define CONSOLE_TAG_MAIN 0\n\n#include <iostream>\n#include <codecvt>\n\ntypedef std::codecvt_utf8<wchar_t> convert_type;\n\nclass Console {\n\tpublic:\n\t\tstatic void println(int index, std::wstring text);\n\t\tstatic void print(int index, std::wstring text);\n\t\tstatic void println(int index, std::string text);\n\t\tstatic void print(int index, std::string text);\n};\n\n#endif\n" }, { "alpha_fraction": 0.7910447716712952, "alphanum_fraction": 0.8059701323509216, "avg_line_length": 15.75, "blob_id": "ad33f50665b841238d03460446497278405c2d8c", "content_id": "d8750beff92ed8d583ec6295b2f1b761d0694ddb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 67, "license_type": "no_license", "max_line_length": 27, "num_lines": 4, "path": "/src/cpp/render/shaders/code/common.h", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "int commonVar;\n\nimport int vertex_position;\nimport int testapple2;\n" }, { "alpha_fraction": 0.7762430906295776, "alphanum_fraction": 0.7762430906295776, "avg_line_length": 16.238094329833984, "blob_id": "e054548aaabdf38a1495fcd746a9ef101d0a0a77", "content_id": "95d4dc85e429e221aee306d29600159287885273", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 362, "license_type": "no_license", "max_line_length": 51, "num_lines": 21, "path": "/src/cpp/input/ControlScheme.cpp", "repo_name": "CHAZICLE/MuseumGuard", "src_encoding": "UTF-8", "text": "#include \"util/DeltaTime.hpp\"\n#include \"input/controls/DebugControls.hpp\"\n\n#include \"ControlScheme.hpp\"\n\nusing namespace world;\nusing namespace controls;\nusing namespace util;\n\nControlScheme::ControlScheme(Entity *controlEntity)\n{\n\tthis->controlEntity = controlEntity;\n}\nControlScheme::~ControlScheme()\n{\n\t\n}\nvoid ControlScheme::tick(DeltaTime &deltaTime)\n{\n\t\n}\n" } ]
145
lauraabend/NLP_ProductClassifier
https://github.com/lauraabend/NLP_ProductClassifier
885329bad7f0dd26688361b679cfa1e25f14be5e
0c659165a7c444ef07c34cbf2452ad57ad6510de
dbc08e2b8b1d257b4ad0a12eeefb5d8ac2168045
refs/heads/master
2021-01-18T12:46:00.842400
2016-07-01T21:17:08
2016-07-01T21:17:08
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6779969930648804, "alphanum_fraction": 0.6849772334098816, "avg_line_length": 42.9466667175293, "blob_id": "078d8878c03008b44ffb9bcebc52d9ae1bf3d187", "content_id": "1377966c9d462c28a9a812de9fbcb8962c764702", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3295, "license_type": "no_license", "max_line_length": 157, "num_lines": 75, "path": "/ClassifyProducts.py", "repo_name": "lauraabend/NLP_ProductClassifier", "src_encoding": "UTF-8", "text": "from nltk.corpus import wordnet\nimport numpy as np\nimport pandas as pd\nfrom nltk.tokenize import TweetTokenizer\nfrom nltk.tag import pos_tag\nfrom nltk.stem.porter import *\nfrom nltk.corpus import stopwords\nfrom sklearn.metrics import confusion_matrix\nfrom sklearn.metrics import classification_report\n\n\ndef assign_product_to_class(class_descriptions, description_of_product):\n comparison_list = []\n description_of_product = list(set(description_of_product))\n description_of_product = [word for word in description_of_product if word not in stopwords.words('english')]\n for className in class_descriptions.keys():\n comparison_per_class = []\n for word1 in class_descriptions[className]:\n word_from_list1 = wordnet.synsets(word1)\n for word2 in description_of_product:\n word_from_list2 = wordnet.synsets(word2)\n if word_from_list1 and word_from_list2:\n s = word_from_list1[0].wup_similarity(word_from_list2[0])\n comparison_per_class.append(s)\n comparison_per_class = [item for item in comparison_per_class if item != None]\n list_of_similar_values = sorted(comparison_per_class, reverse=True)[:5]\n comparison_list.append([np.mean(list_of_similar_values), className])\n return sorted(comparison_list, reverse=True)\n\nstemmer = PorterStemmer()\ntknzr = TweetTokenizer()\n\nclassDescriptions = {\n \"Camera & Photo\": [\"lens\", \"camera\", \"photo\", \"camcorder\", \"photography\", \"image\", \"film\", \"digital\", \"monitor\", \"record\"],\n \"Bedding & Bath\": [\"bed\", \"bath\", \"sheet\", \"towel\", \"shower\", \"tube\", \"bathroom\", \"bedroom\", \"pillow\", \"mattress\", \"sleep\"],\n \"Exercise & Fitness\": [\"exercise\", \"fitness\", \"sport\", \"games\", \"weight\", \"train\", \"resistance\", \"soccer\", \"tennis\", \"golf\", \"yoga\", \"basketball\", \"fit\"]\n}\nfor i in classDescriptions.keys():\n classDescriptions[i] = [stemmer.stem(word) for word in classDescriptions[i]]\n\n\nfile = pd.read_csv(\"./test_set2.csv\", delimiter=\";\", encoding='latin-1')\n\n\nlist_of_products = list(zip(file[\"Product_id\"].tolist(), file[\"Description\"], file[\"Category\"]))\nlist_of_products_ready = [list(elem) for elem in list_of_products]\n\nreal_label = []\nprediction = []\n\nfor i in range(len(list_of_products_ready)):\n # Tokenize the sentence\n tokenized_words = tknzr.tokenize(list_of_products_ready[i][1])\n list_of_products_ready[i].pop(1)\n # Stem the words\n stemed_words = [stemmer.stem(plural) for plural in tokenized_words]\n # Tag the morphology of the word\n tagged_words = pos_tag(stemed_words)\n # Only select the NN and NNP\n only_nouns = [word for word, pos in tagged_words if pos == 'NN' or pos == 'NNP']\n # Append the resulting words\n list_of_products_ready[i].append(only_nouns)\n\n # Start classification\n similatiry_to_classes = assign_product_to_class(classDescriptions, list_of_products_ready[i][2])\n list_of_products_ready[i].insert(2, similatiry_to_classes[0][1])\n\n real_label.append(list_of_products_ready[i][1])\n 
prediction.append(list_of_products_ready[i][2])\n print(list_of_products_ready[i])\n\n\nprint(confusion_matrix(real_label, prediction))\n\nprint(classification_report(real_label, prediction, target_names=[\"Exercise & Fitness\", \"Camera & Photo\", \"Bedding & Bath\"]))" } ]
1
gorag/persons-information
https://github.com/gorag/persons-information
5b2141898c38325a8f8aacaf2fd0213256020e6c
87b9b3bb7253e9b47211a4e9c641457f7538f845
69a7af1e656cbd7d31c5046c14eb75263f95fa10
refs/heads/master
2020-03-28T07:50:24.802123
2018-09-11T12:50:40
2018-09-11T12:50:40
147,926,904
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7553191781044006, "alphanum_fraction": 0.7553191781044006, "avg_line_length": 17.799999237060547, "blob_id": "94bf0d8ee3dec827133d75adf60c8387e6070182", "content_id": "1d17bdd38c117f85a04af8177beb9f089e0f68c5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 94, "license_type": "no_license", "max_line_length": 33, "num_lines": 5, "path": "/persons_information/card_index/apps.py", "repo_name": "gorag/persons-information", "src_encoding": "UTF-8", "text": "from django.apps import AppConfig\n\n\nclass CardIndexConfig(AppConfig):\n name = 'card_index'\n" }, { "alpha_fraction": 0.7112970948219299, "alphanum_fraction": 0.7112970948219299, "avg_line_length": 25.55555534362793, "blob_id": "b87a849bf82eec1c2b68fae408b2a668c85b23e0", "content_id": "04bfef0bdd7827198303ec4f9b9404695f30f3bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 239, "license_type": "no_license", "max_line_length": 84, "num_lines": 9, "path": "/persons_information/card_index/urls.py", "repo_name": "gorag/persons-information", "src_encoding": "UTF-8", "text": "from django.urls import path\n\nfrom card_index import views\nfrom card_index.views import persons\n\nurlpatterns = [\n path('', persons, name='persons'),\n path('detail/<int:pk>', views.PersonDetailView.as_view(), name='person-detail'),\n]\n" }, { "alpha_fraction": 0.5896580219268799, "alphanum_fraction": 0.598832368850708, "avg_line_length": 29.743589401245117, "blob_id": "aa446b830608b7b1dd5868d1fe12ebb5a35f6d30", "content_id": "1be16d455d9544f10b71f6fffdbc8c9776681405", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1199, "license_type": "no_license", "max_line_length": 91, "num_lines": 39, "path": "/persons_information/card_index/factories.py", "repo_name": "gorag/persons-information", "src_encoding": "UTF-8", "text": "from datetime import timedelta\n\nimport factory\nfrom factory import fuzzy\n\nfrom . 
import models\n\n\nclass PersonsFactory(factory.django.DjangoModelFactory):\n class Meta:\n model = models.Person\n\n gender = fuzzy.FuzzyChoice(choices=['MALE', 'FEMALE'])\n full_name = factory.LazyAttribute(\n lambda o:\n factory.Faker('name_male', 'ru_RU').generate({})\n if o.gender == 'MALE'\n else factory.Faker('name_female', 'ru_RU').generate({})\n )\n date_of_birth = factory.Faker('date_of_birth', 'ru_RU', minimum_age=16, maximum_age=60)\n phone_number = factory.Faker('phone_number', 'ru_RU')\n start_date = factory.LazyAttribute(\n lambda o:\n factory.Faker(\n 'date_between_dates',\n 'ru_RU',\n date_start=o.date_of_birth + timedelta(days=365 * 16)\n ).generate({})\n )\n end_date = factory.LazyAttribute(\n lambda o:\n factory.Faker(\n 'date_between_dates',\n 'ru_RU',\n date_start=o.start_date\n ).generate({})\n )\n training_group = factory.Faker('sentence', 'ru_RU', nb_words=3)\n educational_institution = factory.Faker('sentence', 'ru_RU', nb_words=3)\n" }, { "alpha_fraction": 0.6116279363632202, "alphanum_fraction": 0.6139534711837769, "avg_line_length": 36.39130401611328, "blob_id": "8467ae54d4e0e1d09c6991d363c886b7f002776a", "content_id": "0018d79777d57e4e59e2e9d013796576e6ebe0f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 867, "license_type": "no_license", "max_line_length": 117, "num_lines": 23, "path": "/persons_information/card_index/tables.py", "repo_name": "gorag/persons-information", "src_encoding": "UTF-8", "text": "import django_tables2 as tables\n\nfrom .models import Person\n\n\nclass PersonTable(tables.Table):\n passport = tables.Column(empty_values=(), verbose_name='Паспорт')\n delete = tables.TemplateColumn(\n '<a href=\"{{ record.get_absolute_url }}\" class=\"btn btn-default\" >'\n '<span class=\"glyphicon glyphicon-expand\"></span>'\n '</a>',\n verbose_name='')\n\n def render_passport(self, record):\n if record.documents.exists():\n return ', '.join([p.number if p.type == 'PASSPORT' else '' for p in record.documents.all()])\n return ''\n\n class Meta:\n model = Person\n fields = ('full_name', 'date_of_birth', 'gender', 'phone_number', 'start_date', 'end_date', 'training_group',\n 'educational_institution', 'passport')\n template_name = 'django_tables2/bootstrap-responsive.html'\n" }, { "alpha_fraction": 0.8214285969734192, "alphanum_fraction": 0.8214285969734192, "avg_line_length": 22.33333396911621, "blob_id": "45e4c16720fe50a05e6214fffb0cbb13f307d509", "content_id": "32c237dfa0f2673fe30d516a49a5c8fb53e9d1c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 140, "license_type": "no_license", "max_line_length": 46, "num_lines": 6, "path": "/persons_information/card_index/admin.py", "repo_name": "gorag/persons-information", "src_encoding": "UTF-8", "text": "from django.contrib import admin\n\nfrom card_index.models import Person, Document\n\nadmin.site.register(Person)\nadmin.site.register(Document)\n" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 18, "blob_id": "1246198094b468767bdbbb561f7c0fd43e670df6", "content_id": "8f872d6d9300c3f6b43ec9604c7a6a557081e3d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 56, "license_type": "no_license", "max_line_length": 22, "num_lines": 3, "path": "/requirements.txt", "repo_name": "gorag/persons-information", "src_encoding": "UTF-8", "text": "Django==2.1.1\ndjango-tables2==1.21.2\nfactory-boy==2.11.1" }, { 
"alpha_fraction": 0.7728337049484253, "alphanum_fraction": 0.7751756310462952, "avg_line_length": 27.46666717529297, "blob_id": "1c998ee9a3d35fbb21a3e285773757482ffcbcb5", "content_id": "1f0f0715ac1486aee7450fb70de237ba96d53312", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 427, "license_type": "no_license", "max_line_length": 75, "num_lines": 15, "path": "/persons_information/card_index/views.py", "repo_name": "gorag/persons-information", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nfrom django.views.generic import DetailView\nfrom django_tables2 import RequestConfig\nfrom .models import Person\nfrom .tables import PersonTable\n\n\ndef persons(request):\n table = PersonTable(Person.objects.all())\n RequestConfig(request).configure(table)\n return render(request, 'card_index/person_list.html', {'table': table})\n\n\nclass PersonDetailView(DetailView):\n model = Person\n" }, { "alpha_fraction": 0.7634854912757874, "alphanum_fraction": 0.7759336233139038, "avg_line_length": 15.066666603088379, "blob_id": "c0c964fbc228a56a2ad3bd833e22d035e0cb6088", "content_id": "1451570ffc8bc87af43233810b73aef6158e9b33", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 241, "license_type": "no_license", "max_line_length": 54, "num_lines": 15, "path": "/README.md", "repo_name": "gorag/persons-information", "src_encoding": "UTF-8", "text": "# Persons Information\n\n### Install\n```\npip install -r requirements.txt\n\ncd persons_information\n\npython manage.py makemigrations\npython manage.py migrate\n\npython generate_fake_data.py num_records (default=100)\n\npython manage.py runserver\n```\n" }, { "alpha_fraction": 0.6438923478126526, "alphanum_fraction": 0.6563146710395813, "avg_line_length": 23.149999618530273, "blob_id": "ad9e07585d337c359b7cd9cef1dd9a9a2ec5898d", "content_id": "d886a0833f83246d1846e35b647d9db5f62bde9e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 483, "license_type": "no_license", "max_line_length": 83, "num_lines": 20, "path": "/persons_information/generate_fake_data.py", "repo_name": "gorag/persons-information", "src_encoding": "UTF-8", "text": "if __name__ == \"__main__\":\n import os\n import sys\n\n sys.path.extend(sys.path[0])\n\n if len(sys.argv) == 1:\n num_records = 100\n else:\n num_records = int(sys.argv[1])\n\n import django\n\n os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'persons_information.settings')\n\n django.setup()\n from card_index.models import Person\n from card_index.factories import PersonsFactory\n\n Person.objects.bulk_create(PersonsFactory.build_batch(num_records))\n" } ]
9
DLTech21/Ice-demo
https://github.com/DLTech21/Ice-demo
3f26514a715f8a983a616d9722a1a3d0c2a4c8fc
41d059de932dc332ad7ca02449a8e41c74584ba5
09bfdb317043f0cf2e67acf9e3b67b51253196fb
refs/heads/master
2020-03-27T10:26:48.250723
2017-05-02T09:21:26
2017-05-02T09:21:26
146,420,742
0
1
null
2018-08-28T09:04:31
2018-08-20T06:20:18
2017-05-02T09:22:14
null
[ { "alpha_fraction": 0.6501901149749756, "alphanum_fraction": 0.6996197700500488, "avg_line_length": 23.370370864868164, "blob_id": "e995edec578a0f28b8c193235b6e8ec05f969743", "content_id": "6bb0d7f1a6d16229b65728d55d1869c6940e9d47", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1649, "license_type": "no_license", "max_line_length": 98, "num_lines": 54, "path": "/Centos安装Ice的PHP.md", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "# Centos安装Ice的PHP\n本文档来自[陈志新](http://weibo.com/czhixin),感谢陈志新。\n\n<span style=\"color:#f00\">在CentOS系统,以下过程在/opt目录下完成,/opt是Ice依赖和Ice默认目录,减少配置修改。</span>\n\n## 安装\n1. 编译安装第三方库(依赖)\n\n* 编译安装Berkeley DB 5.3.28\n```\nwget http://zeroc.com/download/berkeley-db/db-5.3.28.NC.tar.gz\ntar xzf db-5.3.28.NC.tar.gz\ncd db-5.3.28.NC\nwget http://zeroc.com/download/berkeley-db/berkeley-db.5.3.28.patch\npatch -p0 < ./berkeley-db.5.3.28.patch\ncd db-5.3.28.NC/build_unix\n../dist/configure --enable-cxx --enable-java --prefix=/opt/db53 根据需要—enable=java可以不要,如果要支持java则需要\nmake && make install\ncd /opt/db53\nln -s lib lib64\n```\n\n* 编译安装Ice的依赖库\n```\ngit clone https://github.com/zeroc-ice/mcpp.git\ncd mcpp\nmake && make install\ncp lib64/libmcpp.a /usr/lib64\n```\n\n2. 编译安装Ice For cpp\n```\nwget http://github.com/zeroc-ice/ice/archive/v3.6.3.tar.gz\ntar -xvf v3.6.3.tar.gz\ncd ice-3.6.3/cpp\nvi config/Make.rules.Linux\n找到BASELIBS = -lIceUtil 在后台添加-liconv,修改后BASELIBS = -lIceUtil -liconv\nmake && make install\n```\n\n3. 编译安装Ice For php\n```\n回到/opt/ice-3.6.3\ncd /opt/ice-3.6.3/php\n修改config/Make.rules.php\ncd config/Make.rules.php\nLP64 := yes 注释去掉\nUSE_NAMESPACES ?= yes 注释去掉\nPHP_CONFIG ?= php-config对应位置\nmake && make install\n在/opt/ice-3.6.3/php/lib下得到IcePHP.so\n```\n\n4. 将IcePHP.so放入PHP扩展目录,并修改php.ini(参考其他扩展的修改方式,过程通用),重启php服务器" }, { "alpha_fraction": 0.7784090638160706, "alphanum_fraction": 0.7795454263687134, "avg_line_length": 25.66666603088379, "blob_id": "5ae1fd66f8de51076b3da070ff05314dbfb7f15a", "content_id": "1a02e50ecc62fe4f96d071e387bfea4ac6ad0b49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 884, "license_type": "no_license", "max_line_length": 102, "num_lines": 33, "path": "/android/IceClient/src/com/zzwtec/iceclient/FetchMyTickOrdersAsyncTask.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "package com.zzwtec.iceclient;\n\nimport com.zzwtec.iceTicketProject.util.ice.IceClientUtil;\nimport com.zzwtec.ticket.ticket.Order;\nimport com.zzwtec.ticket.ticket.TicketServicePrx;\n\nimport android.os.AsyncTask;\n\npublic class FetchMyTickOrdersAsyncTask extends AsyncTask<String, Integer, Order[]> {\n\n\tprivate MainActivity mainActivity;\n\t\n\tpublic FetchMyTickOrdersAsyncTask(MainActivity mainActivity){\n\t\tthis.mainActivity = mainActivity;\n\t}\n\t\n\t@Override\n\tprotected Order[] doInBackground(String... 
phone) {\n\t\ttry{\n\t\t\tTicketServicePrx ticketService = null;\n\t\t\t//Registry方式\n\t\t\tticketService = (TicketServicePrx)IceClientUtil.getServicePrx(mainActivity,TicketServicePrx.class);\n\t\t\treturn ticketService.queryMyOrders(phone[0]);\n\t\t}catch(Exception e){\n\t\t\te.printStackTrace();\n\t\t\treturn null;\n\t\t}\n\t}\n\t\n\tprotected void onPostExecute(Order[] orders) {\n\t\tmainActivity.updateList(orders);\n\t}\n}\n" }, { "alpha_fraction": 0.7047619223594666, "alphanum_fraction": 0.7428571581840515, "avg_line_length": 52, "blob_id": "6676b06238ef7c06364a1c1d2f413a6db8e7afb2", "content_id": "127728fbaac8dbcbb3e4da04b54a31e3de15c2e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 105, "license_type": "no_license", "max_line_length": 93, "num_lines": 2, "path": "/IceAdmin.sh", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "#!/bin/bash\nicegridadmin -u test -p test --Ice.Default.Locator=\"IceGrid/Locator:tcp -h localhost -p 4061\"" }, { "alpha_fraction": 0.8026315569877625, "alphanum_fraction": 0.8157894611358643, "avg_line_length": 20.714284896850586, "blob_id": "1e6983144879ecc69208b690563c01338a9ab4cf", "content_id": "9245b1405633958b1354972122cc164446f464cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 152, "license_type": "no_license", "max_line_length": 49, "num_lines": 7, "path": "/java/iceTicketProject/src/main/java/com/zzwtec/iceTicketProject/ice/glacier2/Glacier2Callback.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "package com.zzwtec.iceTicketProject.ice.glacier2;\n\nimport Ice.ObjectPrx;\n\npublic interface Glacier2Callback {\n\tpublic void callback(ObjectPrx proxy);\n}\n" }, { "alpha_fraction": 0.6112759709358215, "alphanum_fraction": 0.6290801167488098, "avg_line_length": 23.467741012573242, "blob_id": "c54adebc257109b83706d39ba63ed78ca64f75b7", "content_id": "6c71ab4f8b052b02b3ae73b3ca5e76b71b9f96b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 3075, "license_type": "no_license", "max_line_length": 96, "num_lines": 124, "path": "/IceApp.sh", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "#!/bin/bash\nsource /etc/profile\nSELF=$(cd $(dirname $0); pwd -P)/$(basename $0)\nSELF_PATH=`dirname $SELF`\n\naction=\"$1\"\n\nIce_Version=\"3.6.1\"\n\nIce_gridregistry_config=\"./Ice_gridregistry/icegridregistry.cfg\"\nIce_gridregistry_log=\"${SELF_PATH}/Ice_gridregistry/Ice_gridregistry.log\"\nIce_gridregistry_pid_file=\"${SELF_PATH}/Ice_gridregistry/IceGridRegistry.pid\"\n\nIce_gridnode_config=\"./Ice_gridnode/icegridnode1.cfg\"\nIce_gridnode_log=\"${SELF_PATH}/Ice_gridnode/Ice_gridnode.log\"\nIce_gridnode_pid_file=\"${SELF_PATH}/Ice_gridnode/Ice_gridnode.pid\"\n\nIce_glacier2_config=\"./Ice_glacier2/iceglacier2.cfg\"\nIce_glacier2_log=\"${SELF_PATH}/Ice_glacier2/Ice_glacier2.log\"\nIce_glacier2_pid_file=\"${SELF_PATH}/Ice_glacier2/Ice_glacier2.pid\"\n\n#ice grid registry\nstart_gridregistry(){\n icegridregistry --Ice.Config=${Ice_gridregistry_config} >>${Ice_gridregistry_log} 2>&1 &\n echo $!>${Ice_gridregistry_pid_file}\n echo \"启动Ice ${Ice_Version} Grid Registry,PID=`cat ${Ice_gridregistry_pid_file}`\"\n}\nstop_gridregistry(){\n if [ -f ${Ice_gridregistry_pid_file} ] ;then\n kill -term `cat ${Ice_gridregistry_pid_file}`\n rm -f ${Ice_gridregistry_pid_file}\n echo \"关闭Ice ${Ice_Version} Grid Registry\"\n else\n echo \"Ice 
${Ice_Version} Grid Registry未启动\"\n fi\n}\nrestart_gridregistry(){\n stop_gridregistry\n sleep 2\n start_gridregistry\n}\n\n#ice grid node\nstart_gridnode(){\n icegridnode --Ice.Config=${Ice_gridnode_config} >>${Ice_gridnode_log} 2>&1 &\n echo $!>${Ice_gridnode_pid_file}\n echo \"启动Ice ${Ice_Version} Grid Node,PID=`cat ${Ice_gridnode_pid_file}`\"\n}\nstop_gridnode(){\n if [ -f ${Ice_gridnode_pid_file} ] ;then\n kill -term `cat ${Ice_gridnode_pid_file}`\n rm -f ${Ice_gridnode_pid_file}\n echo \"关闭Ice ${Ice_Version} Grid Node\"\n else\n echo \"Ice ${Ice_Version} Grid Node未启动\"\n fi\n}\nrestart_gridnode(){\n stop_gridnode\n sleep 2\n start_gridnode\n}\n\n#glacier2\nstart_glacier2(){\n glacier2router --Ice.Config=${Ice_glacier2_config} >>${Ice_glacier2_log} 2>&1 &\n echo $!>${Ice_glacier2_pid_file}\n echo \"启动Ice ${Ice_Version} Glacier2,PID=`cat ${Ice_glacier2_pid_file}`\"\n}\n\nstop_glacier2(){\n if [ -f ${Ice_glacier2_pid_file} ] ;then\n kill -term `cat ${Ice_glacier2_pid_file}`\n rm -f ${Ice_glacier2_pid_file}\n echo \"关闭Ice ${Ice_Version} Glacier2\"\n else\n echo \"Ice ${Ice_Version} Glacier2未启动\"\n fi\n}\n\nrestart_glacier2(){\n stop_glacier2\n sleep 2\n start_glacier2\n}\n\nstart(){\n start_gridregistry\n sleep 2\n start_gridnode\n sleep 2\n start_glacier2\n}\nstop(){\n stop_glacier2\n sleep 2\n stop_gridnode\n sleep 2\n stop_gridregistry\n}\nrestart(){\n stop\n sleep 2\n start\n}\n\ncase \"${action:-''}\" in\n start)\n start\n ;;\n stop)\n stop\n ;;\n restart)\n restart\n ;;\n restartGlaicer2)\n restart_glacier2\n ;;\n *) \n echo \"Usage:$0 start|stop|restart|restartGlaicer2\" \n exit 1\n ;;\nesac" }, { "alpha_fraction": 0.7336065769195557, "alphanum_fraction": 0.7469262480735779, "avg_line_length": 26.11111068725586, "blob_id": "20dc9b6b98d1ec62e72a4f2322a835b42b710b20", "content_id": "4eda6b2d92bb65f6de39f0d255af19917f6da43d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1610, "license_type": "no_license", "max_line_length": 236, "num_lines": 36, "path": "/Linux系统下的日志切割.md", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "Ice在我们的服务器上跑了半年左右,发现有些日志文件是Ice内部生成的,而这些日志文件没有相关的设置可以按照我们的需求来切割。问了以前做运维的同事,他推荐我使用Linux系统自带的日志轮替 logrotate 工具,做日志轮替。刚刚好我们的服务器是CentOS6.5也有,所以就上网搜索相关资料了。\n\n说明如果系统没有的话可以安装 logrotate,由于logrotate依赖crontab,所以也要安装crontab。如何安装这里就不说了,搜索有很多相关资料。\n\n废话不多说,直接上相关配置文件。在/etc/logrotate.d/下创建了ice_grid_log和ice_glacier2_log,分别编辑以下内容。这样就能可以每日切割日志出来。如果想马上切割,可以输入以下指令,这样就能将/home/Ice/Ice_glacier2/Ice_glacier2.log日志切割出来,切出来的文件是/home/Ice/Ice_glacier2/Ice_glacier2.log-yyyymmdd,yyyy是年份,mm是月份,dd是日期。\n\n\tlogrotate -vf /etc/logrotate.d/ice_glacier2_log\n\n#### ice_grid_log内容\n\t/home/Ice/grid/*.log {\n \t\tdaily\n \t\trotate 7\n \t\tdateext\n \t\tcreate\n \t\tmissingok\n \t\tnotifempty\n \t\tcopytruncate\n \t\tnocompress\n \t\tnoolddir\n \t}\n\n#### ice_glacier2_log内容\n\t/home/Ice/Ice_glacier2/Ice_glacier2.log {\n \t\tdaily\n \t\trotate 7\n \t\tdateext\n \t\tcreate\n \t\tmissingok\n \t\tnotifempty\n \t\tcopytruncate\n \t\tnocompress\n \t\tnoolddir\n \t}\n \t\n#### 结语\n看了logrotate,功能很强大,能自动清理过时的文件,能将切割出来的日志进行压缩,也能通过邮件发送给指定的邮箱。\n" }, { "alpha_fraction": 0.790224552154541, "alphanum_fraction": 0.8028578758239746, "avg_line_length": 44.595916748046875, "blob_id": "66497460ccb2c97c258af8b803681d7de8fca1ee", "content_id": "7cc38b361758799cc4e3790f78f974bf0e345e44", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 142253, "license_type": "no_license", 
"max_line_length": 462, "num_lines": 1960, "path": "/Ice 3.6.1配置参数说明.md", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "如果翻译有误请指出\n#[Ice 3.6.1配置参数说明](https://doc.zeroc.com/display/Ice36/Property+Reference)\n\n---\n\n# <span id=\"目录\">目录</span>\n* [Ice](#Ice)\n* [Freeze](#Freeze)\n* [Glacier2](#Glacier2)\n* [Ice.ACM](#Ice.ACM)\n* [Ice.Admin](#Ice.Admin)\n* [Ice.Default](#Ice.Default)\n* [Ice.InitPlugins](#Ice.InitPlugins)\n* [Ice.IPv4](#Ice.IPv4)\n* [Ice.IPv6](#Ice.IPv6)\n* [Ice.Override](#Ice.Override)\n* [Ice.Plugin](#Ice.Plugin)\n* [Ice.PluginLoadOrder](#Ice.PluginLoadOrder)\n* [Ice.PreferIPv6Address](#Ice.PreferIPv6Address)\n* [Ice.TCP](#Ice.TCP)\n* [Ice.ThreadPool](#Ice.ThreadPool)\n* [Ice.Trace](#Ice.Trace)\n* [Ice.UDP](#Ice.UDP)\n* [Ice.Warn](#Ice.Warn)\n* [IceBox](#IceBox)\n* [IceBoxAdmin](#IceBoxAdmin)\n* [IceDiscovery](#IceDiscovery)\n* [IceGrid](#IceGrid)\n* [IceGridAdmin](#IceGridAdmin)\n* [IceLocatorDiscovery](#IceLocatorDiscovery)\n* [IceMX.Metrics](#IceMX.Metrics)\n* [IcePatch2](#IcePatch2)\n* [IcePatch2Client](#IcePatch2Client)\n* [IceSSL](#IceSSL)\n* [IceStorm Properties](#IceStorm Properties)\n* [IceStormAdmin](#IceStormAdmin)\n\n---\n[返回目录](#目录)\n## <span id=\"Ice\">Ice</span>\n###### <span id=\"Ice.BackgroundLocatorCacheUpdates\">Ice.BackgroundLocatorCacheUpdates</span>,格式:\n\tIce.BackgroundLocatorCacheUpdates=num\n如果设置是0(默认),一种间接代理的调用,其端点比配置的定位器缓存超时触发一个定位器缓存更新;运行时间延迟了调用,直到新的端点返回到定位器。\n如果设置大于0,在与过期端点的间接代理调用时,仍会触发一个定位器缓存更新,但在后台执行更新,并且运行时间使用过期的终结点进行调用。这样就避免了在缓存条目过期之后的第一次调用的延迟。\n\n###### <span id=\"Ice.BatchAutoFlushSize\">Ice.BatchAutoFlushSize</span>,格式:\n\tIce.BatchAutoFlushSize=num\n此属性控制如何在Ice运行时间处理批处理消息的冲洗。如果num大于0,当新信息添加到批处理和信息会导致批量超过num千字节时,运行时自动强制刷新当前批处理。如果num设置为0或负数,批次必须按应用程序显式刷新。默认是1024。\n当刷新,批处理请求被发送为一个单一的Ice信息。接收器中的Ice运行时间限制到[Ice.MessageSizeMax](#Ice.MessageSizeMax)指定的传入消息的最大大小。因此,发送者必须定期冲洗批处理请求(无论是手动或自动),以确保他们不超过接收器的配置限制。\n\n###### <span id=\"Ice.CacheMessageBuffers\">Ice.CacheMessageBuffers</span>,格式:\n\tIce.CacheMessageBuffers=num (Java, .NET)\n如果num大于0,Ice运行时缓存消息缓冲区,用于将来重用。这可以提高性能和降低Ice的内部垃圾收集器去回收垃圾所花时间。然而,对于交换非常大的消息的应用程序,这个高速缓存可能消耗过多的内存,因此应该被禁用,通过设置此属性为零。\n此属性影响缓存同步调用消息缓冲区。ICE运行时不会缓存异步调用消息缓冲区。\n###### 平台提醒\n###### Java\n当设置为1,Ice分配非直接消息缓存;设置为2,Ice分配直接消息缓存。使用直接消息缓冲区可以最大限度地减少复制,通常会导致提高吞吐量的结果。默认是2。\n###### .NET\n默认是1。\n\n###### <span id=\"Ice.ChangeUser\">Ice.ChangeUser</span>,格式:\n\tIce.ChangeUser=user (C++ & Unix only)\n如果设置,Ice将用户和组id更改为相应的在/etc/passwd里user的身份。这只有当超级用户执行的Ice应用程序才有效。\n\n###### <span id=\"Ice.CollectObjects\">Ice.CollectObjects</span>,格式:\n\tIce.CollectObjects=num (C++)\nIce的C++包括回收片类的实例,恢复由Ice运行时间循环图的垃圾收集设施。将此属性设置为1个导致Ice运行时间来假设一个程序接收的所有循环对象图都有资格获得默认的集合。默认是0。\n\n###### <span id=\"Ice.Compression.Level\">Ice.Compression.Level</span>,格式:\n\tIce.Compression.Level=num\n指定用于压缩协议消息的bzip2压缩级别。合法值是1到9,其中1个代表最快的压缩和9个代表最好的压缩。值得注意的是,高水平导致bzip2算法投入更多的资源来压缩的努力,可能不会导致较低水平的显著提高。默认是1。\n\n###### <span id=\"Ice.ConsoleListener\">Ice.ConsoleListener</span>,格式:\n\tIce.ConsoleListener=num (.NET)\n如果num非0,Ice运行时安装一个ConsoleTraceListener写信息到stderr。如果num是0,日志禁用。设置[Ice.LogFile](#Ice.LogFile)可重写该属性:如果[Ice.LogFile](#Ice.LogFile)设置,消息被写到日志文件中,不理会[Ice.ConsoleListener](#Ice.ConsoleListener)设置。\n\n###### <span id=\"Ice.EventLog.Source\">Ice.EventLog.Source</span>,格式:\n\tIce.EventLog.Source=name (C++ & Windows only)\n指定一个事件日志源的名称,该名称是Ice::Service的子类将被用于Windows服务。该name相当于在Eventlog的subkey注册一个key。应用程序(或管理员)通常在安装服务时准备注册表项。如果没有找到匹配的注册key,则Windows在应用日志中记录事件。在名称的任何反斜杠默转化为正斜杠。如果没有定义,Ice::Service通过--service选项指定使用服务名称。\n\n###### <span 
id=\"Ice.HTTPProxyHost\">Ice.HTTPProxyHost</span>,格式:\n\tIce.HTTPProxyHost=addr\n指定HTTP代理服务端的主机名或IP地址。如果addr非空,Ice为所有的输出(客户端)连接使用指定的HTTP代理服务端。\n\n###### <span id=\"Ice.HTTPProxyPort\">Ice.HTTPProxyPort</span>,格式:\n\tIce.HTTPProxyPort=num\n指定HTTP代理服务端的端口。默认是1080。\n\n###### <span id=\"Ice.ImplicitContext\">Ice.ImplicitContext</span>,格式:\n\tIce.ImplicitContext=type\n指定一个通信者是否有一个隐含的请求上下文,如果是这样的话,在什么范围内适用。合法值:None(相当于空字符串)、PerThread和Shared。默认值是None。\n\n###### <span id=\"Ice.LogFile\">Ice.LogFile</span>,格式:\n\tIce.LogFile=file\n一个基于记录仪实现简单的文件取代了通信器的默认日志。这个属性不影响每个进程记录器。记录器创建指定的文件,如果有必要,否则添加文件。如果记录器无法打开文件,在通信器初始化中应用程序收到一个InitializationException。如果在通信器初始化中为一个记录器对象提供InitializationData参数,它比这个属性优先。日志器不提供日志文件维护任何内置的支持(如日志旋转),但它可以和系统工具如logrotate并存。\n\n###### <span id=\"Ice.LogStdErr.Convert\">Ice.LogStdErr.Convert</span>,格式:\n\tIce.LogStdErr.Convert=num (C++)\n如果num大于0,在Windows上,通信器的默认日志器将日志消息从应用的窄字符串编码(定义所安装的窄字符串转换器,如果有的话)的Windows控制台代码页。如果[Ice.StdErr](#Ice.StdErr)没有设置,默认是1,否则是0。这个属性是由第一个在一个过程中创建的通信程序读取的,它被其他的通信者忽略了。\n\n###### <span id=\"Ice.MessageSizeMax\">Ice.MessageSizeMax</span>,格式:\n\tIce.MessageSizeMax=num\n该属性控制的最大大小(KB)无压缩协议的消息是由Ice运行时间接受。大小包括冰协议头的大小。默认是1024。此属性的唯一目的是为了防止恶意或有缺陷的发件人从触发一个大容量的内存分配的接收器。如果这不是一个值得关注的问题,你可以设置[Ice.MessageSizeMax](#Ice.MessageSizeMax)为0;截至Ice3.6,设置此属性为零(或负数)禁用消息大小限制。\n如果在接收Ice运行时遇到的传入消息的大小超过[Ice.MessageSizeMax](#Ice.MessageSizeMax)接收机的设置,运行时间报出了一个MemoryLimitException和关闭连接。如,当客户端收到一个超大的回复信息,其结果是一个MemoryLimitException调用。当一个服务端接收到一个超大的请求消息时,客户端接收到一个ConnectionLostException(因为服务端关闭连接)并且如果[Ice.Warn.Connections](#Ice.Warn.Connections)设置服务端会记录一条消息。\n\n###### <span id=\"Ice.Nohup\">Ice.Nohup</span>,格式:\n\tIce.Nohup=num\n如果num大于0,应用程序方便类(以及在C++的Ice::Service类)在Unix 忽略SIGHUP和在Windows忽略CTRL_LOGOFF_EVENT。作为结果,如果用户开始申请注销,这套使用Ice.Nohup应用继续运行。应用程序的默认值为0,Ice::Service默认是1。\n\n###### <span id=\"Ice.NullHandleAbort\">Ice.NullHandleAbort</span>,格式:\n\tIce.NullHandleAbort=num\n如果num大于0,调用操作使用一个空的智能指针导致程序终止,而不是报IceUtil::NullHandleException。\n\n###### <span id=\"Ice.Package.module\">Ice.Package.module</span>,格式:\n\tIce.Package.module=package (Java)\nIce为Java允许自定义生成的代码的包装。如果您使用此功能,Ice运行时需要额外的配置成功解包异常和混合类。此属性将一个顶层的切片模块与一个Java的package关联。如果所有的顶层模块生成相同的用户定义的包,这比[Ice.Default.Package](#Ice.Default.Package)更容易使用。\n\n###### <span id=\"Ice.PrintAdapterReady\">Ice.PrintAdapterReady</span>,格式:\n\tIce.PrintAdapterReady=num\n如果num大于0,一个对象适配器在激活完成后在标准输出打印“adapter_name ready”。这是非常有用的脚本,需要等待,直到一个对象适配器是准备使用。\n\n###### <span id=\"Ice.PrintProcessId\">Ice.PrintProcessId</span>,格式:\n\tIce.PrintProcessId=num\n如果num大于0,在启动时,在标准输出上打印处理标识。\n\n###### <span id=\"Ice.PrintStackTraces\">Ice.PrintStackTraces</span>,格式:\n\tIce.PrintStackTraces=num (JavaScript, C++)\n如果num大于0,插入一个来自IceUtil::Exception的异常在记录器的辅助类(如Ice::Warning)也显示出异常的堆栈跟踪。同样的,在基础异常类的ice_stackTrace方法,IceUtil::Exception,将返回堆栈或空字符串取决于num的值。如果没有设置,默认值取决于如何编译的Ice运行时间:0对于一个优化的构建和1对于调试版本。堆栈跟踪是当前不可用在Ice的C++和Python在Linux/ARM,不管这个属性的值。\n在Windows上,堆栈跟踪,可在调试版本和发布版本建立的环境变量RELEASEPDBS=yes。发布的DLL纳入标准Ice二进制分配是建立在这个设置启用。.PDB文件用于调试和发布分发版本在一个单独的Ice PDB Windows安装程序。\n当生产一个堆栈,Windows在定位存储中关联DLLs尝试定位.PDB文件;如果.PDB文件在这个定位存储找不到,Windows会使用搜索一下路径尝试定位这个文件,当前工作目录路径,然后是IceUtil DLL路径,最后是_NT_SYMBOL_PATH环境变量指定的路径。因此,如果.PDB文件不是你创建的,你需要拷贝他们放在IceUtil DLL之后或在_NT_SYMBOL_PATH环境变量的里添加它们所在目录的路径。\n该属性只支持JavaScript、C++和使用C++运行时的脚本语言(Python, Ruby, PHP)。注意,此属性使Python,Ruby和PHP将只显示的C/C + +的堆栈跟踪。\n\n###### <span id=\"Ice.ProgramName\">Ice.ProgramName</span>,格式:\n\tIce.ProgramName=name\nname是程序的名称,在初始化是,从argv[0] (C++)和AppDomain.CurrentDomain.FriendlyName (.NET) 
自动设置的。对于Java,Ice.ProgramName初始化为空字符串。默认的名称可以通过设置这个属性重写。\n\n###### <span id=\"Ice.RetryIntervals\">Ice.RetryIntervals</span>,格式:\n\tIce.RetryIntervals=num [num ...]\n这个属性定义操作次数自动重试和彼此之间的延迟重试。例如,如果该属性设置为0 100 500,操作重试3次:第一次失败后立即重试,在第二次失败后等待100ms再试,在第三次失败后等待500ms再试。默认值(0)意味着Ice立即重试一次。如果设置为-1,不重试。\n\n###### <span id=\"Ice.ServerIdleTime\">Ice.ServerIdleTime</span>,格式:\n\tIce.ServerIdleTime=num\n如果num大于0,通信器已闲置num秒,Ice自动调用Communicator::shutdown一次。这种关闭通讯的服务端,导致所有线程等待Communicator::waitForShutdown返回。在那之后,一个服务端通常在退出前做一些清理工作。默认是0,即服务端不会自动关闭。此属性通常用于服务器通过IceGrid自动激活。\n\n###### <span id=\"Ice.SOCKSProxyHost\">Ice.SOCKSProxyHost</span>,格式:\n\tIce.SOCKSProxyHost=addr\n为SOCKS代理服务端指定主机名和IP地址。如果addr不为空,Ice为所有的输出连接(客户端)使用指定的SOCKS代理服务端。\nIce当前只支持SOCKS4协议,即只运行IPv4连接。\n\n###### <span id=\"Ice.SOCKSProxyPort\">Ice.SOCKSProxyPort</span>,格式:\n\tIce.SOCKSProxyPort=num\nSOCKS代理服务端的端口。默认是1080。\n\n###### <span id=\"Ice.StdErr\">Ice.StdErr</span>,格式:\n\tIce.StdErr=filename\n如果filename不为空,程序的标准错误流指向该文件,用追加放送。此属性仅检查在进程中创建的第一个通信。\n\n###### <span id=\"Ice.StdOut\">Ice.StdOut</span>,格式:\n\tIce.StdOut=filename\n如果filename不为空,程序的标准输出流指向该文件,用追加放送。此属性仅检查在进程中创建的第一个通信。\n\n###### <span id=\"Ice.SyslogFacility\">Ice.SyslogFacility</span>,格式:\n\tIce.SyslogFacility=string (Unix only)\n这个属性设置syslog的特色为string。如果[Ice.UseSyslog](#Ice.UseSyslog)没有设置该属性没有效。\nstring可以设置syslog的特色为LOG_AUTH, LOG_AUTHPRIV, LOG_CRON, LOG_DAEMON, LOG_FTP, LOG_KERN, LOG_LOCAL0, LOG_LOCAL1, LOG_LOCAL2, LOG_LOCAL3, LOG_LOCAL4, LOG_LOCAL5, LOG_LOCAL6, LOG_LOCAL7, LOG_LPR, LOG_MAIL, LOG_NEWS, LOG_SYSLOG, LOG_USER, LOG_UUCP。\n默认是LOG_USER。\n\n###### <span id=\"Ice.ThreadInterruptSafe\">Ice.ThreadInterruptSafe</span>,格式:\n\tIce.ThreadInterruptSafe=num (Java)\n如果num大于0,Ice的Java禁用消息缓存通过[Ice.CacheMessageBuffers](#Ice.CacheMessageBuffers)设置为0,并采取必要的措施,以确保正确的Java中断工作。默认值为零。\n\n###### <span id=\"Ice.UseSyslog\">Ice.UseSyslog</span>,格式:\n\tIce.UseSyslog=num (Unix only)\n如果num大于0,一个特殊的记录器安装,而不是标准错误日志syslog服务。syslog标识符是[Ice.ProgramName](#Ice.ProgramName)。使用[Ice.SyslogFacility](#Ice.SyslogFacility)选择syslog特色。\n\n###### <span id=\"Ice.Voip\">Ice.Voip</span>,格式:\n\tIce.Voip=num (Ice Touch only)\n如果num大于0,Ice运行时为所有通过Ice交流器创建的套接字设置kCFStreamNetworkServiceType属性为kCFStreamNetworkServiceTypeVoIP。允许使用此设置的语音应用程序的语音。它保证了套接字不会关闭,当应用程序处于后台时,可以接收数据。有关此设置的信息来自VoIP使用时配置套接字说明,以及适用时。默认是0。\n\n---\n[返回目录](#目录)\n## <span id=\"Freeze\">Freeze</span>\n###### <span id=\"Freeze.DbEnv.env-name.CheckpointPeriod\">Freeze.DbEnv.env-name.CheckpointPeriod</span>,格式:\n\tFreeze.DbEnv.env-name.CheckpointPeriod=num\nFreeze 创建的每一个 Berkeley DB 环境都有一个与其相关联的线程,每隔 num 秒检查一次该环境。缺省值是 120 秒。\n\n###### <span id=\"Freeze.DbEnv.env-name.DbHome\">Freeze.DbEnv.env-name.DbHome</span>,格式:\n\tFreeze.DbEnv.env-name.DbHome=db-home\n定义这个 Freeze 数据库环境的主目录。缺省是 env-name。\n\n###### <span id=\"Freeze.DbEnv.env-name.DbPrivate\">Freeze.DbEnv.env-name.DbPrivate</span>,格式:\n\tFreeze.DbEnv.env-name.DbPrivate=num\n如果 num 被设成大于零的值, Freeze 会让 Berkeley DB 适用进程私有的内存,而不是共享内存。缺省值是 1。要针对正在使用的环境运行 db_archive (或其他 Berkeley DB 实用程序),把这个属性设成零。\n\n###### <span id=\"Freeze.DbEnv.env-name.DbRecoverFatal\">Freeze.DbEnv.env-name.DbRecoverFatal</span>,格式:\n\tFreeze.DbEnv.env-name.DbRecoverFatal=num\n如果 num 被设成大于零的值,当环境被打开时,将进行 “fatal”恢复。缺 省值是 0。\n\n###### <span id=\"Freeze.DbEnv.env-name.EncodingVersion\">Freeze.DbEnv.env-name.EncodingVersion</span>,格式:\n\tFreeze.DbEnv.env-name.EncodingVersion=encoding\n定义key和value的编码和解码的encoding。默认值是[Ice.Default.EncodingVersion](#Ice.Default.EncodingVersion)。\n\n###### <span 
id=\"Freeze.DbEnv.env-name.LockFile\">Freeze.DbEnv.env-name.LockFile</span>,格式:\n\tFreeze.DbEnv.env-name.LockFile=num\n如果mun大于0,Freeze在数据库环境中创建锁文件以防止其他进程的打开环境。默认值是1。请注意,应用程序不应该禁用锁定文件,因为同时访问同一个环境的多个进程可以导致数据损坏。Freeze.DbEnv.env-name.DbPrivate设置为0,FreezeScript工具会禁用锁文件。\n\n###### <span id=\"Freeze.DbEnv.env-name.OldLogsAutoDelete\">Freeze.DbEnv.env-name.OldLogsAutoDelete</span>,格式:\n\tFreeze.DbEnv.env-name.OldLogsAutoDelete=num\n如果 num 被设成大于零的值,在每次遇到周期性的检查点时 ( 参见 Freeze.DbEnv.env-name.DbCheckpointPeriod),不再使用的老事务日志将 会被删除。缺省值是 1。\n\n###### <span id=\"Freeze.DbEnv.env-name.PeriodicCheckpointMinSize\">Freeze.DbEnv.env-name.PeriodicCheckpointMinSize</span>,格式:\n\tFreeze.DbEnv.env-name.PeriodicCheckpointMinSize=num\nnum 是周期性的检查点的最小尺寸 ( 参见 Freeze.DbEnv.env-name.DbCheckpointPeriod),以 kb 为单位。这个值将传给 Berkeley DB 的检 查点函数。缺省值是 0( 也就是说,没有最小尺寸 )。\n\n###### <span id=\"Freeze.Evictor.env-name.filename.MaxTxSize\">Freeze.Evictor.env-name.filename.MaxTxSize</span>,格式:\n\tFreeze.DbEnv.env-name.PeriodicCheckpointMinSize=num\nFreeze 使用了一个后台线程来保存对数据库的更新。在把许多 facet 合起来 保存时使用了事务。 num 定义的是在每个事务中所保存的 facet 的最大数 目。缺省值是 10 * SaveSizeTrigger ( 参见 Freeze.Evictor.env-name.db-name.SaveSizeTrigger) ;如果这个值为负,实际的值将被设成 100。\n\n###### <span id=\"Freeze.Evictor.env-name.filename.name.BtreeMinKey\">Freeze.Evictor.env-name.filename.name.BtreeMinKey</span>,格式:\n\tFreeze.Evictor.env-name.filename.name.BtreeMinKey=num\nname代表一个数据库名称或索引。该属性设置相当于Berkeley DB数据库设置B-tree minkey。num小于2会被忽略。请查看Berkeley DB的文档描述。\n\n###### <span id=\"Freeze.Evictor.env-name.filename.name.Checksum\">Freeze.Evictor.env-name.filename.name.Checksum</span>,格式:\n\tFreeze.Evictor.env-name.filename.Checksum=num\n如果num大于0,相当于Berkeley DB数据库启用checksums。请查看Berkeley DB的文档描述。\n\n###### <span id=\"Freeze.Evictor.env-name.filename.PageSize\">Freeze.Evictor.env-name.filename.PageSize</span>,格式:\n\tFreeze.Evictor.env-name.filename.PageSize=num\n如果num大于0,该属性设置相当于Berkeley DB数据库设置页大小。请查看Berkeley DB的文档描述。\n\n###### <span id=\"Freeze.Evictor.env-name.filename.PopulateEmptyIndices\">Freeze.Evictor.env-name.filename.PopulateEmptyIndices</span>,格式:\n\tFreeze.Evictor.env-name.filename.PopulateEmptyIndices=num\n当num不是0,并且你创建一个逐出器,该逐出器有一个或多个空索引,createBackgroundSaveEvictor或createTransactionalEvictor会调用,将填充这些索引所对应的facet。当FreezeScript不能改变索引,这是特别有用的在一个FreezeScript的Freeze逐出器改变之后。但是这可以显著减缓的逐出器创造的,如果你有一个空的索引,因为目前在数据库方面没有匹配该索引的类型。默认是0。\n\n###### <span id=\"Freeze.Evictor.env-name.filename.RollbackOnUserException\">Freeze.Evictor.env-name.filename.RollbackOnUserException</span>,格式:\n\tFreeze.Evictor.env-name.filename.RollbackOnUserException=num\n如果num大于0,如果分发出路是一个用户异常,一个事务逐出器回滚该事务。如果num是0(默认),事务逐出器提交事务。\n\n###### <span id=\"Freeze.Evictor.env-name.filename.SavePeriod\">Freeze.Evictor.env-name.filename.SavePeriod</span>,格式:\n\tFreeze.Evictor.env-name.filename.SavePeriod=num\nFreeze 使用了一个后台线程来保存对数据库的更新。在上一次保存的 num 毫秒之后,如果有任何 facet 被创建、修改或销毁,这个后台线程就会醒来 保存这些 facet。如果 num是0,就不进行周期性地保存。缺省值是 60000。\n\n###### <span id=\"Freeze.Evictor.env-name.filename.SaveSizeTrigger\">Freeze.Evictor.env-name.filename.SaveSizeTrigger</span>,格式:\n\tFreeze.Evictor.env-name.filename.SaveSizeTrigger=num\nFreeze 使用了一个后台线程来保存对数据库的更新。如果 num 是 0 或正 数,只要有 num 个或更多的 facet 被创建、修改或销毁,后台线程就会醒来 保存它们。如果 num 为负,后台线程就不会因上述变化而被唤醒。缺省值 是 10。\n\n###### <span id=\"Freeze.Evictor.env-name.filename.StreamTimeout\">Freeze.Evictor.env-name.filename.StreamTimeout</span>,格式:\n\tFreeze.Evictor.env-name.filename.StreamTimeout=num\n当保存线程保存一个对象时,它需要锁定该对象以获得对象的状态的一致的副本。如果锁不能获得在数秒,产生一个致命的错误。如果应用程序注册了一个致命的错误回调,这个回调将被调用,否则程序将立即终止。 当数是0或负数,没有超时。默认是0\n\n###### 
<span id=\"Freeze.Map.name.BtreeMinKey\">Freeze.Map.name.BtreeMinKey</span>,格式:\n\tFreeze.Map.name.BtreeMinKey=num\n名称可以表示数据库名称或索引名称。该属性设置相当于Berkeley DB数据库设置B-tree minkey。num小于2会被忽略。请查看Berkeley DB的文档描述。\n\n###### <span id=\"Freeze.Map.name.Checksum\">Freeze.Map.name.Checksum</span>,格式:\n\tFreeze.Map.name.Checksum=num\n名称可以表示数据库名称或索引名称。如果num大于0,相当于Berkeley DB数据库启用checksums。请查看Berkeley DB的文档描述。\n\n###### <span id=\"Freeze.Map.name.PageSize\">Freeze.Map.name.PageSize</span>,格式:\n\tFreeze.Map.name.PageSize=num\n名称可以表示数据库名称或索引名称。如果num大于0,该属性设置相当于Berkeley DB数据库设置页大小。请查看Berkeley DB的文档描述。\n\n###### <span id=\"Freeze.Trace.DbEnv\">Freeze.Trace.DbEnv</span>,格式:\n\tFreeze.Trace.DbEnv=num\nFreeze 数据库环境活动的跟踪级别:\n* 0 不跟踪数据库环境的活动 ( 缺省 )。\n* 1 跟踪数据库的打开和关闭。\n* 2 还要跟踪检查点,以及老日志文件的移除。\n\n###### <span id=\"Freeze.Trace.Evictor\">Freeze.Trace.Evictor</span>,格式:\n\tFreeze.Trace.Evictor=num\nFreeze 逐出器活动的跟踪级别:\n* 0 不跟踪逐出器的活动 ( 缺省 )。\n* 1 跟踪 Ice 对象和 facet 的创建和析构、 facet 的流动时间、 facet 的 保存时间、对象逐出 (每 50 个对象)和逐出器的解除激活。\n* 2 还要跟踪对象查找,以及所有对象的逐出。\n* 3 还要跟踪从数据库取回对象的活动。\n\n###### <span id=\"Freeze.Trace.Map\">Freeze.Trace.Map</span>,格式:\n\tFreeze.Trace.Map=num\nFreeze 映射表活动的跟踪级别:\n* 0 不跟踪映射表的活动 ( 缺省 )。\n* 1 跟踪数据库的打开和关闭。\n* 2 还要跟踪迭代器和事务操作,以及底层数据库的引用计数。\n\n---\n[返回目录](#目录)\n## <span id=\"Glacier2\">Glacier2</span>\n###### <span id=\"Glacier2.AddConnectionContext\">Glacier2.AddConnectionContext</span>,格式:\n\tGlacier2.AddConnectionContext=num\n如果num设置为1或2,Glacier2增添了一些键值对的请求上下文,它发出的每个请求。如果num的值设置为1,这些条目添加到所有转发请求的上下文。如果num的值设置为2,环境不仅要求checkpermissions授权权限验证,并调用创建会话管理器。\n\n如果num是非0,glacier2添加以下条目:\n\n* _con.type 返回的连接类型Connection::type。\n* _con.localAddress 本地地址(只限TCL和SSL)。\n* _con.localPort 本地端口(只限TCL和SSL)。\n* _con.remoteAddress 远程地址(只限TCL和SSL)。\n* _con.remotePort 远程端口(只限TCL和SSL)。\n* _con.cipher 密码(只限SSL)。\n* _con.peerCert 首先验证客户端的证书链(只限SSL)。\n\n默认值是0,这意味着不添加任何上下文。\n\n###### <span id=\"Glacier2.AddUserToAllowCategories\">Glacier2.AddUserToAllowCategories</span>,格式:\n\tGlacier2.AddUserToAllowCategories=num\n创建新会话的时候指定是否要添加一个验证用户身份的Glacier2.AllowCategories属性。合法取值如下:\n\n* 0 不添加用户身份(默认)。\n* 1 添加用户身份。\n* 2 添加以下划线开头的用户身份。\n\n这个属性是不推荐,只支持向后兼容。新应用应使用Glacier2.Filter.Category.AcceptUser。\n\n###### <span id=\"Glacier2.Admin.AdapterProperty\">Glacier2.Admin.AdapterProperty</span>,格式:\n\tGlacier2.Admin.AdapterProperty=value\nGlacier2使用名称为Glacier2.Admin的适配器管理对象适配器。因此,适配器属性可用于配置该适配器。 Glacier2.Admin.Endpoints属性必须定义为启用管理对象适配器。Glacier2点管理界面允许远程客户端关闭路由器;我们一般建议端点,只能从防火墙后面使用。\n\n###### <span id=\"Glacier2.AllowCategories\">Glacier2.AllowCategories</span>,格式:\n\tGlacier2.AllowCategories=list\n指定一个空格分隔的身份类别列表。如果这个属性的定义,然后Glacier2路由器只允许请求来自这个list的身份匹配的Ice objects。如果Glacier2.AddUserToAllowCategories定义为非0,路由器会为每个会话自动增加用户标识到这个list。\n这个属性是不推荐,只支持向后兼容。新应用应使用Glacier2.Filter.Category.Accept。因此,适配器属性可用于配置该适配器。 \n\n###### <span id=\"Glacier2.Client.AdapterProperty\">Glacier2.Client.AdapterProperty</span>,格式:\n\tGlacier2.Client.AdapterProperty=value\nGlacier2使用名为Glacier2.Client适配器为客户提供对象适配器。该适配器必须可以被Glacier2的客户端访问。使用安全传输,此适配器是高度推荐的。\n注意,Glacier2.Registry.Client.Endpoints为Glacier2控制客户端。端口4063(TCP)和4064(SSL)为Glacier2通过[Internet Assigned Numbers Authority (IANA)](http://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.xhtml)保留。\n\n###### <span id=\"Glacier2.Client.AlwaysBatch\">Glacier2.Client.AlwaysBatch</span>,格式:\n\tGlacier2.Client.AlwaysBatch=num\n如果num大于0,glacier2路由器总是批排队单向客户请求服务器,不管他们的_fwd上下文的价值。此属性仅与[Glacier2.Client.Buffered](#Glacier2.Client.Buffered)启用相关。默认是0。\n\n###### <span 
id=\"Glacier2.Client.Buffered\">Glacier2.Client.Buffered</span>,格式:\n\tGlacier2.Client.Buffered=num\n如果num大于0,Glacier2路由器在缓冲模式下运行,来自客户端的输入请求会在一个单独的线程中被排队和处理。如果num是0,路由器在非缓冲模式下运行,请求相同的线程下被转发和接收。默认是1。\n\n###### <span id=\"Glacier2.Client.ForwardContext\">Glacier2.Client.ForwardContext</span>,格式:\n\tGlacier2.Client.ForwardContext=num\n如果num大于0,Glacier2路由器包括请求上下文,当从客户端发送请求到服务器。默认是0。\n\n###### <span id=\"Glacier2.Client.SleepTime\">Glacier2.Client.SleepTime</span>,格式:\n\tGlacier2.Client.SleepTime=num\n如果num大于0,Glacier2路由器在转发所有来自客户端的排队请求后,以毫秒为单位的时间睡觉。对于批处理该延时是非常有用的,因为这使得它更容易在一个单一的批次中积累。同样的,如果重写,延迟使得它更可能为覆盖实际生效。此属性仅与[Glacier2.Client.Buffered](#Glacier2.Client.Buffered)启用相关。默认是0。\n\n###### <span id=\"Glacier2.Client.Trace.Override\">Glacier2.Client.Trace.Override</span>,格式:\n\tGlacier2.Client.Trace.Override=num\n如果num大于0,每当请求失效时Glacier2路由器记录一条跟踪消息。默认是0。\n\n###### <span id=\"Glacier2.Client.Trace.Reject\">Glacier2.Client.Trace.Reject</span>,格式:\n\tGlacier2.Client.Trace.Reject=num\n如果num大于0,每当路由器的配置过滤器拒绝客户端的请求时Glacier2路由器记录一条跟踪消息。默认是0。\n\n###### <span id=\"Glacier2.Client.Trace.Request\">Glacier2.Client.Trace.Request</span>,格式:\n\tGlacier2.Client.Trace.Request=num\n如果num大于0,每个来自客户端的请求被转发时Glacier2路由器记录一条跟踪消息。默认是0。\n\n###### <span id=\"Glacier2.CryptPasswords\">Glacier2.CryptPasswords</span>,格式:\n\tGlacier2.CryptPasswords=file\n指定Glacier2访问控制列表的文件名。文件中的每一行必须包含一个用户名和一个加密的密码,用空格隔开,如写一个密码文件。如果[Glacier2.PermissionsVerifier](#Glacier2.PermissionsVerifier)定义,该属性会被忽略。\n\n###### <span id=\"Glacier2.Filter.AdapterId.Accept\">Glacier2.Filter.AdapterId.Accept</span>,格式:\n\tGlacier2.Filter.AdapterId.Accept=list\n指定一个空格分隔的适配器标识符列表。如果定义,Glacier2路由器的过滤请求,以便它只允许请求在这个list匹配标识符的Ice Object的适配器。包含空格的标识符必须用单引号或双引号括起来。单或双引号,出现在一个标识符必须用一个反斜杠转义。\n\n###### <span id=\"Glacier2.Filter.Address.Accept\">Glacier2.Filter.Address.Accept</span>,格式:\n\tGlacier2.Filter.Address.Accept=list\n指定地址端口对的空格分隔列表。当定义了,Glacier2路由器的过滤请求,以便它只允许请求通过代理,包含网络端点信息匹配的地址端口对在这个属性中列出的对象。如果没有定义,默认值是\\*,即允许任何网络地址。该属性接受请求,也许会被[Glacier2.Filter.Address.Reject](#Glacier2.Filter.Address.Reject)拒绝。\n每一对的形式都是address:port。address和port部分可以包含通配符(\\*)或值范围或组。范围或组的格式是[value1, value2, value3, ...]或[value1-value2]。通配符、范围和团体可能出现在地址端口对字符串的任何地方。\n\n###### <span id=\"Glacier2.Filter.Address.Reject\">Glacier2.Filter.Address.Reject</span>,格式:\n\tGlacier2.Filter.Address.Reject=list\n指定地址端口对的空格分隔列表。当定义了,Glacier2路由器拒绝通过代理的Ice Objects请求,包含网络端点信息匹配的地址端口对在这个属性中列出的对象。如果没有定义,路由器总是允许请求任何网络地址,直到[Glacier2.Filter.Address.Accept](#Glacier2.Filter.Address.Accept)被设置,因为[Glacier2.Filter.Address.Accept](#Glacier2.Filter.Address.Accept)属性,请求会被拒绝或允许。如果同时设置[Glacier2.Filter.Address.Accept](#Glacier2.Filter.Address.Accept)和[Glacier2.Filter.Address.Reject](#Glacier2.Filter.Address.Reject),[Glacier2.Filter.Address.Reject](#Glacier2.Filter.Address.Reject)属性优先。\n每一对的形式都是address:port。address和port部分可以包含通配符(\\*)或值范围或组。范围或组的格式是[value1, value2, value3, ...]或[value1-value2]。通配符、范围和团体可能出现在地址端口对字符串的任何地方。\n\n###### <span id=\"Glacier2.Filter.Category.Accept\">Glacier2.Filter.Category.Accept</span>,格式:\n\tGlacier2.Filter.Category.Accept=list\n指定地址端口对的空格分隔列表。当定义了,Glacier2路由器的过滤请求,它只允许匹配在这个属性中列出的类别的Ice Objects的请求。如果[Glacier2.Filter.Category.AcceptUser](#Glacier2.Filter.Category.AcceptUser)定义为非0,路由器会自动添加每个会话的用户名到该列表中。\n包含空格的类别必须用单引号或双引号括起来。单或双引号出现在类别必须用一个反斜杠转义。\n\n###### <span id=\"Glacier2.Filter.Category.AcceptUser\">Glacier2.Filter.Category.AcceptUser</span>,格式:\n\tGlacier2.Filter.Category.AcceptUser=num\n当创建一个新会话时为[Glacier2.Filter.Category.Accept](#Glacier2.Filter.Category.Accept)属性指定是否添加一个认证的用户ID。合法值有:\n\n* 0 
不添加用户ID(默认)\n* 1 添加用户ID\n* 2 添加一个以下划线开头的用户ID\n\n###### <span id=\"Glacier2.Filter.Identity.Accept\">Glacier2.Filter.Identity.Accept</span>,格式:\n\tGlacier2.Filter.Identity.Accept=list\n指定一个空格分隔的身份列表。如果定义了,Glacier2路由器过滤请求,它只允许与列表中匹配的Ice Objects的请求。\n包含空格的类别必须用单引号或双引号括起来。单或双引号出现在类别必须用一个反斜杠转义。\n\n###### <span id=\"Glacier2.Filter.ProxySizeMax\">Glacier2.Filter.ProxySizeMax</span>,格式:\n\tGlacier2.Filter.ProxySizeMax=num\n当设置了,Glacier2路由器拒绝大于num的字符串化的代理请求。这有助于保护系统免受攻击。Glacier2会接收任意长度的请求。\n\n###### <span id=\"Glacier2.InstanceName\">Glacier2.InstanceName</span>,格式:\n\tGlacier2.InstanceName=name\n指定一个默认的Glacier2对象身份类别。如果定义,Glacier2管理接口身份变为name/admin,Glacier2路由接口身份变为name/router。默认是Glacier2。\n\n###### <span id=\"Glacier2.PermissionsVerifier\">Glacier2.PermissionsVerifier</span>,格式:\n\tGlacier2.PermissionsVerifier=proxy\n指定实现为控制访问Glacier2会话的Glacier2::PermissionsVerifier接口的对象的代理。路由器调用此代理来验证每个新会话的用户名和密码。从一个安全的连接创建会话中指定对象的[Glacier2.SSLPermissionsVerifier](#Glacier2.SSLPermissionsVerifier)验证。对于简单的配置,您可以指定使用Glacier2.CryptPasswords密码文件的名称。\nGlacier2提供了一个“null”的权限验证对象,接受任何的用户名和密码组合的情况下,在没有认证是必要的。为了可以验证,设置值为instance/NullPermissionsVerifier,instance是[Glacier2.InstanceName](#Glacier2.InstanceName)的值。作为代理的价值,你可以为代理配置附加的使用属性。\n\n###### <span id=\"Glacier2.ReturnClientProxy\">Glacier2.ReturnClientProxy</span>,格式:\n\tGlacier2.ReturnClientProxy=num\n如果num大于0,Glacier2为使用Ice 3.2.0版本之前的客户提供维护向后兼容性。在这种情况下,你也应该定义[Glacier2.Client.PublishedEndpoints](#Glacier2.Client.PublishedEndpoints)指定客户应该使用与路由器的终点。默认是0。例如,如果glacier2路由器所在的网络在防火墙的后面,[Glacier2.Client.PublishedEndpoints](#Glacier2.Client.PublishedEndpoints)应指定防火墙外部端点。\n\n###### <span id=\"Glacier2.RoutingTable.MaxSize\">Glacier2.RoutingTable.MaxSize</span>,格式:\n\tGlacier2.RoutingTable.MaxSize=num\n此属性设置的路由器的路由表大小为num项。如果更多的代理添加到表比这个值大,会根据最近最少使用的方式将代理逐出表。\n基于ICE 3.1及之后的版本,客户端自动重试操作要求驱逐代理和透明地重新添加这样的代理表。\n基于ICE 3.1之前的版本,客户端会接收到ObjectNotExistException,在驱逐代理时。对于这样的老客户,num必须设置一个足够大的值防止这些客户端失败。默认是1000。\n\n###### <span id=\"Glacier2.Server.AdapterProperty\">Glacier2.Server.AdapterProperty</span>,格式:\n\tGlacier2.Server.AdapterProperty=value\nGlacier2为提供给服务器的对象适配器使用名为Glacier2.Server的适配器。因此,适配器属性可用于配置该适配器。该适配器提供访问SessionControl接口和必须可以访问到回调路由器客户端的服务端。\n\n###### <span id=\"Glacier2.Server.AlwaysBatch\">Glacier2.Server.AlwaysBatch</span>,格式:\n\tGlacier2.Server.AlwaysBatch=num\n如果num大于0,glacier2路由器总是批排队从服务端请求客户端,不管他们的_fwd上下文的价值。此属性仅与[Glacier2.Server.Buffered](#Glacier2.Server.Buffered)启用相关。默认是0。\n\n###### <span id=\"Glacier2.Server.Buffered\">Glacier2.Server.Buffered</span>,格式:\n\tGlacier2.Server.Buffered=num\n如果num大于0,Glacier2路由器在缓冲模式下运行,来自服务端的输入请求会在一个单独的线程中被排队和处理。如果num是0,路由器在非缓冲模式下运行,请求相同的线程下被转发和接收。默认是1。\n\n###### <span id=\"Glacier2.Server.ForwardContext\">Glacier2.Server.ForwardContext</span>,格式:\n\tGlacier2.Server.ForwardContext=num\n如果num大于0,Glacier2路由器包括请求上下文,当从服务器发送请求到客户端。默认是0。\n\n###### <span id=\"Glacier2.Server.SleepTime\">Glacier2.Server.SleepTime</span>,格式:\n\tGlacier2.Server.SleepTime=num\n如果num大于0,Glacier2路由器在转发所有来自服务端的排队请求后,以毫秒为单位的时间睡觉。对于批处理该延时是非常有用的,因为这使得它更容易在一个单一的批次中积累。同样的,如果重写,延迟使得它更可能为覆盖实际生效。此属性仅与[Glacier2.Server.Buffered](#Glacier2.Server.Buffered)启用相关。默认是0。\n\n###### <span id=\"Glacier2.Server.Trace.Override\">Glacier2.Server.Trace.Override</span>,格式:\n\tGlacier2.Server.Trace.Override=num\n如果num大于0,每当请求失效时Glacier2路由器记录一条跟踪消息。默认是0。\n\n###### <span id=\"Glacier2.Server.Trace.Request\">Glacier2.Server.Trace.Request</span>,格式:\n\tGlacier2.Server.Trace.Request=num\n如果num大于0,每当路由器的配置过滤器拒绝服务端的请求时Glacier2路由器记录一条跟踪消息。默认是0。\n\n###### <span 
id=\"Glacier2.SessionManager\">Glacier2.SessionManager</span>,格式:\n\tGlacier2.SessionManager=proxy\n指定实现Glacier2::SessionManager接口的对象的代理。路由器调用该代理为客户端创建一个新会话,但仅在路由器验证该客户的用户名和密码后才可以创建一个新会话。\n作为代理属性,您可以使用属性配置代理的其他方面。\n\n###### <span id=\"Glacier2.SessionTimeout\">Glacier2.SessionTimeout</span>,格式:\n\tGlacier2.SessionTimeout=num\n如果num大于0,一个客户端会话在Glacier2路由器里num秒后过期不能用。默认是0,即永不过期。设置num非常重要这对于客户端会话不过早过期。Active Connection Management (ACM)对客户端连接(连接名为Glacier2.Client的路由器的对象适配器)的会话超时也有效。如果你没有设置为路由器[Glacier2.Client.ACM.Timeout](#Glacier2.Client.ACM.Timeout),路由器使用该属性来控制会话超时。如果没有设置,路由器输入客户端连接使用默认的ACM。\n\n###### <span id=\"Glacier2.SSLPermissionsVerifier\">Glacier2.SSLPermissionsVerifier</span>,格式:\n\tGlacier2.SSLPermissionsVerifier=proxy\n为实现Glacier2::SSLPermissionsVerifier接口的对象指定代理,用来控制访问Glacier2会话。路由器调用此代理来验证客户端的凭据,试图从安全连接创建会话。使用用户名称和密码创建会话来验证[Glacier2.PermissionsVerifier](#Glacier2.PermissionsVerifier)定义的对象。\nGlacier2供应一个“null”权限验证器对象,接受任何客户端证书的情况下,不需要身份验证。为了可以验证,设置值为instance/NullPermissionsVerifier,instance是[Glacier2.InstanceName](#Glacier2.InstanceName)的值。\n作为代理的价值,你可以为代理配置附加的使用属性。\n\n###### <span id=\"Glacier2.Trace.RoutingTable\">Glacier2.Trace.RoutingTable</span>,格式:\n\tGlacier2.Trace.RoutingTable=num\n路由表跟踪等级:\n\n* 0 没有路由表跟踪(默认)\n* 1 每一个代理添加到路由表记录一条信息\n* 2 每一个代理被驱逐出路由表记录一条信息(查看Glacier2.RoutingTable.MaxSize)\n* 3 结合1、2\n\n###### <span id=\"Glacier2.Trace.Session\">Glacier2.Trace.Session</span>,格式:\n\tGlacier2.Trace.Session=num\n如果num大于0,Glacier2路由器日志跟踪消息会话相关的活动。默认是0。\n\n---\n[返回目录](#目录)\n## <span id=\"Ice.ACM\">Ice.ACM</span>\n###### <span id=\"Ice.ACM.Close\">Ice.ACM.Close</span>,格式:\n\tIce.ACM.Close=num\n连接关闭的方式。num值为0、1、2、3、4\n\n* 0 当communicator销毁,网络连接失败或连接端被关闭时直接关闭\n* 1 当连接在一定时间内空闲,没有任何输入输出请求时优雅关闭\n* 2 当连接在一定时间内空闲强制关闭,但有输出请求除外。这个必须配置[Ice.ACM.Client.Heartbeat](#Ice.ACM.Client.Heartbeat)\n* 3 结合了1、2的情况\n* 4 当连接在一定时间内空闲时强制关闭,不管是否有输出或输入请求。\n\n###### <span id=\"Ice.ACM.Heartbeat\">Ice.ACM.Heartbeat</span>,格式:\n\tIce.ACM.Heartbeat=num\n连接心跳。num值为0、1、2、3\n\n* 0 关闭客户端(client)心跳\n* 1 有输入请求时每隔一段时间发送心跳\n* 2 连接空闲时每隔一段时间发送心跳\n* 3 每隔一段时间发送心跳直到连接关闭为止\n\n###### <span id=\"Ice.ACM.Timeout\">Ice.ACM.Timeout</span>,格式:\n\tIce.ACM.Timeout=num\n连接超时,该属性结合[Ice.ACM.Close](#Ice.ACM.Close)和[Ice.ACM.Heartbeat](#Ice.ACM.Heartbeat)使用,默认60,单位秒。\n\n###### <span id=\"Ice.ACM.Client.Close\">Ice.ACM.Client.Close</span>,格式:\n\tIce.ACM.Client.Close=num\n输出端连接关闭的方式,重载[Ice.ACM.Close](#Ice.ACM.Close)\n\n###### <span id=\"Ice.ACM.Client.Heartbeat\">Ice.ACM.Client.Heartbeat</span>,格式:\n\tIce.ACM.Client.Heartbeat=num\n输出端连接心跳,重载[Ice.ACM.Heartbeat](#Ice.ACM.Heartbeat)\n\n###### <span id=\"Ice.ACM.Client.Timeout\">Ice.ACM.Client.Timeout</span>,格式:\n\tIce.ACM.Client.Timeout=num\n输出端连接超时该属性结合[Ice.ACM.Close](#Ice.ACM.Close)和[Ice.ACM.Heartbeat](#Ice.ACM.Heartbeat)使用,重载[Ice.ACM.Timeout](#Ice.ACM.Timeout)\n\n###### <span id=\"Ice.ACM.Server.Close\">Ice.ACM.Server.Close</span>,格式:\n\tIce.ACM.Server.Close=num\n输入端连接关闭的方式,重载[Ice.ACM.Close](#Ice.ACM.Close)\n\n###### <span id=\"Ice.ACM.Server.Heartbeat\">Ice.ACM.Server.Heartbeat</span>,格式:\n\tIce.ACM.Server.Heartbeat=num\n输入端连接心跳,重载[Ice.ACM.Heartbeat](#Ice.ACM.Heartbeat)\n\n###### <span id=\"Ice.ACM.Server.Timeout\">Ice.ACM.Server.Timeout</span>,格式:\n\tIce.ACM.Server.Timeout=num\n输入端连接超时该属性结合[Ice.ACM.Close](#Ice.ACM.Close)和[Ice.ACM.Heartbeat](#Ice.ACM.Heartbeat)使用,重载[Ice.ACM.Timeout](#Ice.ACM.Timeout)\n\n---\n[返回目录](#目录)\n## <span id=\"Ice.Admin\">Ice.Admin</span>\n###### <span 
id=\"Ice.Admin.AdapterProperty\">Ice.Admin.AdapterProperty</span>,格式:\n\tIce.Admin.AdapterProperty=value\n如果Administrative Facility开启,Ice运行时创建并激活一个名为Ice.Admin的administrative object adapter,[Ice.Admin.Endpoints](#Ice.Admin.Endpoints)是默认的,以下三选一,[Ice.Admin.DelayCreation](#Ice.Admin.DelayCreation)不启动;[Ice.Admin.DelayCreation](#Ice.Admin.DelayCreation)启动并且应用在communicator初始化后调用getAdmin;应用使用一个null作为adminAdapter的参数值来调用createAdmin。\n\n###### <span id=\"Ice.Admin.DelayCreation\">Ice.Admin.DelayCreation</span>,格式:\n\tIce.Admin.DelayCreation=num\n如果num大于0,Ice运行时延后创建Ice.Admin administrative object adapter,直到getAdmin在communicator中调用。默认值是0,即当所有插件(plug-in)s初始化后Ice.Admin object adapter会创建,提供给[Ice.Admin.Endpoints](#Ice.Admin.Endpoints)定义。\n\n###### <span id=\"Ice.Admin.Enabled\">Ice.Admin.Enabled</span>,格式:\n\tIce.Admin.Enabled=num\nnum如果大于0,Administrative Facility开启;如果num是0或负数,Administrative Facility关闭。如果没有设置,[Ice.Admin.Endpoints](#Ice.Admin.Endpoints)定义不为空,并且在其他情况下是关闭,Administrative Facility就会开启\n\n###### <span id=\"Ice.Admin.Facets\">Ice.Admin.Facets</span>,格式:\n\tIce.Admin.Facets=name [name ...]\n\n###### <span id=\"Ice.Admin.InstanceName\">Ice.Admin.InstanceName</span>,格式:\n\tIce.Admin.InstanceName=name\n为administrative object定义一个标志。如果定义了,对象的标志是name/admin,如果没有定义默认是一个UUID。\n\n###### <span id=\"Ice.Admin.Logger.KeepLogs\">Ice.Admin.Logger.KeepLogs</span>,格式:\n\tIce.Admin.Logger.KeepLogs=num\n使用非Ice::TraceMessage类型,缓存num多之前的日志信息,默认值是100,如果小于等于0,不缓存任何日志信息。\n\n###### <span id=\"Ice.Admin.Logger.KeepTraces\">Ice.Admin.Logger.KeepTraces</span>,格式:\n\tIce.Admin.Logger.KeepTraces=num\n使用Ice::TraceMessage类型,缓存num多之前的日志信息,默认值是100,如果小于等于0,不缓存任何日志信息。\n\n###### <span id=\"Ice.Admin.Logger.Properties\">Ice.Admin.Logger.Properties</span>,格式:\n\tIce.Admin.Logger.Properties=propertyList\n如果开启,创建自己拥有的communicator发送日志信息到远程日志文件中。\n\n###### <span id=\"Ice.Admin.ServerId\">Ice.Admin.ServerId</span>,格式:\n\tIce.Admin.ServerId=id\n定义一个进程的唯一标志,当Ice运行时到定位器(locator)注册时,用于注册admin对象。\n\n---\n[返回目录](#目录)\n## <span id=\"Ice.Default\">Ice.Default</span>\n###### <span id=\"Ice.Default.CollocationOptimized\">Ice.Default.CollocationOptimized</span>,格式:\n\tIce.Default.CollocationOptimized=num\n搭配优化,有效避免网络拥堵。默认是1,设为0则关闭搭配优化。\n\n###### <span id=\"Ice.Default.EncodingVersion\">Ice.Default.EncodingVersion</span>,格式:\n\tIce.Default.EncodingVersion=ver\n如果没有定义,在Ice 3.5使用编码版本是1.1。\n\n###### <span id=\"Ice.Default.EndpointSelection\">Ice.Default.EndpointSelection</span>,格式:\n\tIce.Default.EndpointSelection=policy\n多个端点(endpoint)的选择策略,值为Ordered或Random,默认是Random。\n\n###### <span id=\"Ice.Default.Host\">Ice.Default.Host</span>,格式:\n\tIce.Default.Host=host\n如果端点(endpoint)没有使用指定的host name,那么将使用该值。\n\n###### <span id=\"Ice.Default.InvocationTimeout\">Ice.Default.InvocationTimeout</span>,格式:\n\tIce.Default.InvocationTimeout=num\n调用超时设置,单位毫秒,默认是-1,即从不超时。\n\n###### <span id=\"Ice.Default.Locator\">Ice.Default.Locator</span>,格式:\n\tIce.Default.Locator=locator\n为所有代理(proxy)和对象适配器(adapter)定义一个定位器,默认没有。\n\n###### <span id=\"Ice.Default.LocatorCacheTimeout\">Ice.Default.LocatorCacheTimeout</span>,格式:\n\tIce.Default.LocatorCacheTimeout=num\n代理(proxy)的定位缓存超时,单位秒。设为0,不使用缓存。设为-1,缓存有不过期。\n\n###### <span id=\"Ice.Default.Package\">Ice.Default.Package</span>,格式:\n\tIce.Default.Package=package\nIce为Java提供定制报名生成代码。\n\n###### <span id=\"Ice.Default.PreferSecure\">Ice.Default.PreferSecure</span>,格式:\n\tIce.Default.PreferSecure=num\n指定安全端点(endpoint),默认值为0,意味着不可靠的端点(endpoint)优先。\n\n###### <span 
id=\"Ice.Default.Protocol\">Ice.Default.Protocol</span>,格式:\n\tIce.Default.Protocol=protocol\n指定端点(endpoint)的传输协议,默认tcp。\n\n###### <span id=\"Ice.Default.Router\">Ice.Default.Router</span>,格式:\n\tIce.Default.Router=router\n指定所有代理的默认路由器。值为Glacier2路由器的控制界面的字符化代理。默认的路由器会重写代理的ice_router代理方法。默认值是没有路由器。\n\n###### <span id=\"Ice.Default.SlicedFormat\">Ice.Default.SlicedFormat</span>,格式:\n\tIce.Default.SlicedFormat=num\n指定slice和异常的编码格式。默认是0,即使用简洁格式。\n\n###### <span id=\"Ice.Default.SourceAddress\">Ice.Default.SourceAddress</span>,格式:\n\tIce.Default.SourceAddress=addr\n将输出套接字绑定到该地址的网卡上,允许ip地址。\n\n###### <span id=\"Ice.Default.Timeout\">Ice.Default.Timeout</span>,格式:\n\tIce.Default.Timeout=num\n端点(endpoint)的超时,单位毫秒,默认60000,设为-1,表示无穷大的超时时间,即没有超时。\n\n---\n[返回目录](#目录)\n###### <span id=\"Ice.InitPlugins\">Ice.InitPlugins</span>,格式:\n\tIce.InitPlugins=num\n如果num大于0,Ice运行时自动初始化并加载插件(plug-in),插件(plug-in)加载和初始化顺序通过[Ice.PluginLoadOrder](#Ice.PluginLoadOrder)定义,默认值是1。\n\n---\n[返回目录](#目录)\n###### <span id=\"Ice.IPv4\">Ice.IPv4</span>,格式:\n\tIce.IPv4=num\nIce是否使用IPv4,大于0,表示使用,默认值是1。\n\n---\n[返回目录](#目录)\n###### <span id=\"Ice.IPv6\">Ice.IPv6</span>,格式:\n\tIce.IPv6=num\nIce是否使用IPv6,大于0,表示使用。如果系统支持IPv6,默认值是1,如果不支持,默认值是0。\n\n---\n[返回目录](#目录)\n## <span id=\"Ice.Override\">Ice.Override</span>\n###### <span id=\"Ice.Override.CloseTimeout\">Ice.Override.CloseTimeout</span>,格式:\n\tIce.Override.CloseTimeout=num\n重写关闭连接的超时设置。单位毫秒。-1表示没有超时。如果没有定义,则使用[Ice.Override.Timeout](#Ice.Override.Timeout),如果[Ice.Override.Timeout](#Ice.Override.Timeout)也没有定义,则使用端点(endpoint)的超时。\n\n###### <span id=\"Ice.Override.Compress\">Ice.Override.Compress</span>,格式:\n\tIce.Override.Compress=num\n如果设定,将重写所有代理(proxy)的压缩设置。num大于0,压缩启用。num为0,压缩不使用。该设置在服务角色忽略。\n提示:如果客户端(client)设置Ice.Override.Compress=1并发送一个压缩请求到一个不支持压缩的服务端(server),服务端(server)将会关闭连接,并且客户端(client)收到ConnectionLostException。\n如果一个客户端(client)不支持压缩并设置Ice.Override.Compress=1,设置会被忽略并通过stderr打印警告信息。\n请求小于100字节不会压缩。\n\n###### <span id=\"Ice.Override.ConnectTimeout\">Ice.Override.ConnectTimeout</span>,格式:\n\tIce.Override.ConnectTimeout=num\n重写建立连接的超时设置。单位毫秒。-1表示没有超时。如果没有设置,会使用[Ice.Override.Timeout](#Ice.Override.Timeout),如果[Ice.Override.Timeout](#Ice.Override.Timeout)也没有定义,则使用端点(endpoint)的超时。\n\n###### <span id=\"Ice.Override.Secure\">Ice.Override.Secure</span>,格式:\n\tIce.Override.Secure=num\n重写所有代理(proxy)的安全设置,只允许安全的端点(endpoint)。该属性等价于每个代理(proxy)都调用ice_secure(true)方法。\n\n###### <span id=\"Ice.Override.Timeout\">Ice.Override.Timeout</span>,格式:\n\tIce.Override.Timeout=num\n重写所有端点(endpoint)超时设置。单位毫秒。-1表示没有超时。\n\n---\n[返回目录](#目录)\n## <span id=\"Ice.Plugin\">Ice.Plugin</span>\n###### <span id=\"Ice.Plugin.name\">Ice.Plugin.name</span>,格式:\n\tIce.Plugin.name=entry_point [args]\n定义插件(plug-in)在communicator初始化时安装。\n\n###### <span id=\"Ice.Plugin.name.clr\">Ice.Plugin.name.clr</span>,格式:\n\tIce.Plugin.name.clr=assembly:class [args]\n定义一个.NET插件(plug-in)在communicator初始化时安装。\n\n###### <span id=\"Ice.Plugin.name.cpp\">Ice.Plugin.name.cpp</span>,格式:\n\tIce.Plugin.name.cpp=path[,version]:function [args]\n定义一个C++插件(plug-in)在communicator初始化时安装。\n\n###### <span id=\"Ice.Plugin.name.java\">Ice.Plugin.name.java</span>,格式:\n\tIce.Plugin.name.java=[path:]class [args]\n定义一个Java插件(plug-in)在communicator初始化时安装。\n\n---\n[返回目录](#目录)\n###### <span id=\"Ice.PluginLoadOrder\">Ice.PluginLoadOrder</span>,格式:\n\tIce.PluginLoadOrder=names\n定义插件(plug-in)的加载顺序。\n\n---\n[返回目录](#目录)\n###### <span 
id=\"Ice.PreferIPv6Address\">Ice.PreferIPv6Address</span>,格式:\n\tIce.PreferIPv6Address=num\n如果IPv4和IPv6同时使用(默认是),当确定hostname时,Ice优先使用IPv6地址。默认为0,如果大于0,优先使用IPv6。\n\n---\n[返回目录](#目录)\n## <span id=\"Ice.TCP\">Ice.TCP</span>\n###### <span id=\"Ice.TCP.Backlog\">Ice.TCP.Backlog</span>,格式:\n\tIce.TCP.Backlog=num\n设置TCP或SSL服务端(server)的端点(endpoint)的监听队列大小。如果没有定义,在C++中如果存在SOMAXCONN就是SOMAXCONN,不存在则是511。在Java和.NET是511。\n\n###### <span id=\"Ice.TCP.RcvSize\">Ice.TCP.RcvSize</span>,格式:\n\tIce.TCP.RcvSize=num\n设置TCP接收缓存大小,单位字节。默认值依赖于本地TCP栈的配置(通常是65535个字节)。\n\n###### <span id=\"Ice.TCP.SndSize\">Ice.TCP.SndSize</span>,格式:\n\tIce.TCP.SndSize=num\n设置TCP发送缓存大小,单位字节。默认值依赖于本地TCP栈的配置(通常是65535个字节)。\n\n--\n[返回目录](#目录)\n## <span id=\"Ice.ThreadPool\">Ice.ThreadPool</span>\n###### <span id=\"Ice.ThreadPool.name.Serialize\">Ice.ThreadPool.name.Serialize</span>,格式:\n\tIce.ThreadPool.name.Serialize=num\n如果大于0,客户端(client)或服务端(server)的线程池序列化每一个连接的所有信息。在最大大小为1的线程池中是不需要的。在多线程池中,启用序列化允许请求从不同的连接同时发送,同时保护每个连接上的消息的顺序。注意序列化对延迟和吞吐量的重要影响。默认是0。\n\n###### <span id=\"Ice.ThreadPool.name.Size\">Ice.ThreadPool.name.Size</span>,格式:\n\tIce.ThreadPool.name.Size=num\n在Ice中线程池基于平均负载因子会动态扩大或缩小。一个线程池总是至少有1个线程,可以随着负载的增加而扩大,最大到[Ice.ThreadPool.name.SizeMax](#Ice.ThreadPool.name.SizeMax)指定的大小。如果最大没有设定,Ice使用num作为最大值。客户端(client)或服务端(server)的线程池初始化大小是num,当在[Ice.ThreadPool.name.ThreadIdleTime](#Ice.ThreadPool.name.ThreadIdleTime)指定的空闲周期,会缩小到1。默认值是1。\n\n###### <span id=\"Ice.ThreadPool.name.SizeMax\">Ice.ThreadPool.name.SizeMax</span>,格式:\n\tIce.ThreadPool.name.SizeMax=num\n客户端(client)或服务端(server)的线程池最大大小。\n\n###### <span id=\"Ice.ThreadPool.name.SizeWarn\">Ice.ThreadPool.name.SizeWarn</span>,格式:\n\tIce.ThreadPool.name.SizeWarn=num\n每当有num条线程在客户端(client)或服务端(server)的线程池中活动,会打印\"low on threads\"的警告,默认是0,禁止警告。\n\n###### <span id=\"Ice.ThreadPool.name.StackSize\">Ice.ThreadPool.name.StackSize</span>,格式:\n\tIce.ThreadPool.name.StackSize=num\n在客户端(client)或服务端(server)的线程池中有num字节大小的栈。默认值是0,即使用操作系统的默认值。\n\n###### <span id=\"Ice.ThreadPool.name.ThreadIdleTime\">Ice.ThreadPool.name.ThreadIdleTime</span>,格式:\n\tIce.ThreadPool.name.ThreadIdleTime=num\n在客户端(client)或服务端(server)的线程池中,Ice会自动回收空闲的线程以节省资源。当线程达到该空闲时间会被回收。单位秒,默认是60。设为0表示从不回收空闲线程。\n\n###### <span id=\"Ice.ThreadPool.name.ThreadPriority\">Ice.ThreadPool.name.ThreadPriority</span>,格式:\n\tIce.ThreadPool.name.ThreadPriority=num\n在客户端(client)或服务端(server)的线程池中指定num优先级的线程。没有设置,Ice会创建Ice.ThreadPriority指定优先级的线程。默认没有设置。\n你可以使用adapter.ThreadPool.ThreadPriority为object adapter重写该属性。\n\n###### <span id=\"Ice.ThreadPriority\">Ice.ThreadPriority</span>,格式:\n\tIce.ThreadPriority=num\n指定num优先级的线程。Ice运行时默认创建指定优先级的线程。没有设置,则按系统默认的优先级创建线程。默认没有设置。\n\n---\n[返回目录](#目录)\n## <span id=\"Ice.Trace\">Ice.Trace</span>\n###### <span id=\"Ice.Trace.Admin.Logger\">Ice.Trace.Admin.Logger</span>,格式:\n\tIce.Trace.Admin.Logger=num\n后台跟踪的等级。\n\n* 0 默认,不跟踪\n* 1 当远程端日志安装或拆卸,跟踪\n* 2 像1,当总是跟踪并发送日志信息给远程端\n\n###### <span id=\"Ice.Trace.Admin.Properties\">Ice.Trace.Admin.Properties</span>,格式:\n\tIce.Trace.Admin.Properties=num\n后台跟踪的属性更新模式\n\n* 0 默认,没有属性\n* 1 添加、修改和移除跟踪属性。\n\n###### <span id=\"Ice.Trace.Locator\">Ice.Trace.Locator</span>,格式:\n\tIce.Trace.Locator=num\nIce运行时创建定位请求来解决端点(endpoint)的对象适配器和已知对象。使用定位器注册表的请求来更新对象适配器端点(endpoint),并设置服务端(server)进程代理。此属性控制的跟踪级别为Ice运行时与定位器的相互作用:\n\n* 0 默认,无定位跟踪\n* 1 跟踪Ice定位器和定位器注册请求。\n* 2 像1,但也从缓存中跟踪删除端点(endpoint)。\n\n###### <span id=\"Ice.Trace.Network\">Ice.Trace.Network</span>,格式:\n\tIce.Trace.Network=num\n控制低级别的网络活动,例如连接建立和读/写操作的跟踪级别:\n\n* 0 默认,无网络跟踪\n* 1 跟踪成功连接的建立和关闭。\n* 2 像1,但也跟踪尝试绑定,连接和断开的套接字。\n* 3 
像2,还有跟踪数据传输,发布端点(endpoint)的对象适配器,和当前为端点(endpoint)使用通配符地址的本地地址列表。\n\n###### <span id=\"Ice.Trace.Protocol\">Ice.Trace.Protocol</span>,格式:\n\tIce.Trace.Protocol=num\nIce协议信息的跟踪等级:\n\n* 0 默认,无协议跟踪\n* 1 跟踪Ice协议信息。\n\n###### <span id=\"Ice.Trace.Retry\">Ice.Trace.Retry</span>,格式:\n\tIce.Trace.Retry=num\nIce支持在请求失败的情况下自动重试。此属性控制重试的跟踪级别:\n\n* 0 默认,不重试\n* 1 跟踪Ice操作调用重试\n* 2 还跟踪I端点(endpoint)的使用。\n\n###### <span id=\"Ice.Trace.Slicing\">Ice.Trace.Slicing</span>,格式:\n\tIce.Trace.Slicing=num\n用于异常和允许接收器将未知的异常或类切分为已知类的Ice数据编码。此属性控制用于切片活动的跟踪级别:\n\n* 0 默认,不跟踪切片活动\n* 1 跟踪所有未知的异常和类的切片活动。\n\n###### <span id=\"Ice.Trace.ThreadPool\">Ice.Trace.ThreadPool</span>,格式:\n\tIce.Trace.ThreadPool=num\n控制Ice线程池的跟踪等级:\n\n* 0 默认,不跟踪线程池活动\n* 1 跟踪线程池的创建、扩大和缩小。\n\n---\n[返回目录](#目录)\n## <span id=\"Ice.UDP\">Ice.UDP</span>\n###### <span id=\"Ice.UDP.RcvSize\">Ice.UDP.RcvSize</span>,格式:\n\tIce.UDP.RcvSize=num\n设置UDP接收缓存大小,单位字节。Ice大于28字节的消息数造成DatagramLimitException。默认大小依赖于本地UDP栈的配置,通常是65535和8192字节。值小于28会被忽略。\n小于65535限制的Ice数据包会调整为内核的缓存大小。\n\n###### <span id=\"Ice.UDP.SndSize\">Ice.UDP.SndSize</span>,格式:\n\tIce.UDP.SndSize=num\n设置UDP发送缓存大小,单位字节。Ice大于28字节的消息数造成DatagramLimitException。默认大小依赖于本地UDP栈的配置,通常是65535和8192字节。值小于28会被忽略。\n小于65535限制的Ice数据包会调整为内核的缓存大小。\n\n---\n[返回目录](#目录)\n## <span id=\"Ice.Warn\">Ice.Warn</span>\n###### <span id=\"Ice.Warn.AMICallback\">Ice.Warn.AMICallback</span>,格式:\n\tIce.Warn.AMICallback=num\n如果num的值被设置为大于0,如果AMI回调返回一个异常,会打印一个警告。默认是1。\n\n###### <span id=\"Ice.Warn.Connections\">Ice.Warn.Connections</span>,格式:\n\tIce.Warn.Connections=num\n如果num的值被设置为大于0,在连接中的某些异常情况下的Ice应用打印警告消息。默认是0。\n\n###### <span id=\"Ice.Warn.Datagrams\">Ice.Warn.Datagrams</span>,格式:\n\tIce.Warn.Datagrams=num\n如果num的值被设置为大于0,如果接收一个超出服务端(server)接收缓存大小的数据包,服务端(server)打印警告消息。默认是0。\n\n###### <span id=\"Ice.Warn.Dispatch\">Ice.Warn.Dispatch</span>,格式:\n\tIce.Warn.Dispatch=num\n如果num的值被设置为大于0,当一个输入请求被分发发生某个异常时,Ice应用打印警告消息。\n警告等级:\n\n* 0 没有警告\n* 1 默认。打印Ice::LocalException,Ice::UserException,C++ exceptions,和Java运行时的异常警告\n* 2 像1,但也发出Ice::ObjectNotExistException,Ice::FacetNotExistException,和 Ice::OperationNotExistException警告\n\n###### <span id=\"Ice.Warn.Endpoints\">Ice.Warn.Endpoints</span>,格式:\n\tIce.Warn.Endpoints=num\n如果num的值被设置为大于0,如果一个包含一个端点(endpoint)的序列不代理,无法分析,打印警告。默认是1。\n\n###### <span id=\"Ice.Warn.UnknownProperties\">Ice.Warn.UnknownProperties</span>,格式:\n\tIce.Warn.UnknownProperties=num\n如果num的值被设置为大于0,对未知的对象适配器(object adapter)和代理(proxy),Ice运行时打印警告。默认是1。\n\n###### <span id=\"Ice.Warn.UnusedProperties\">Ice.Warn.UnusedProperties</span>,格式:\n\tIce.Warn.UnusedProperties=num\n如果num的值被设置为大于0,在communicator销毁一些不能读的属性时,Ice运行时打印警告。此警告是有效地检测拼写错误的属性。默认是0。\n\n---\n[返回目录](#目录)\n## <span id=\"IceBox\">IceBox</span>\n###### <span id=\"IceBox.InheritProperties\">IceBox.InheritProperties</span>,格式:\n\tIceBox.InheritProperties=num\n从IceBox Server继承配置,必须大于0,没有定义,默认是0。\n\n###### <span id=\"IceBox.LoadOrder\">IceBox.LoadOrder</span>,格式:\n\tIceBox.LoadOrder=names\n设置服务启动先后顺序。\n\n###### <span id=\"IceBox.PrintServicesReady\">IceBox.PrintServicesReady</span>,格式:\n\tIceBox.PrintServicesReady=token\n当初始化完所有的service就会输出“token ready”。\n\n###### <span id=\"IceBox.Service.name\">IceBox.Service.name</span>,格式:\n\tIceBox.Service.name=entry_point [--key=value] [args]\n定义一个service在IceBox初始化期间加载。name定义service的名称,作为start方法的name参数,必须唯一的。[--key=value]将会被作为property属性,用于构造该服务的communicator,用来更加精确地控制每个Ice服务的性能调优。[args]作为参数传入start方法的参数String[] args中,作为服务的启动初始化参数。\n\n###### <span 
id=\"IceBox.UseSharedCommunicator.name\">IceBox.UseSharedCommunicator.name</span>,格式:\n\tIceBox.UseSharedCommunicator.name=num\nnum必须大于0,如果没有定义,默认值为0,为其他service共享communicator。\n\n---\n[返回目录](#目录)\n## <span id=\"IceBoxAdmin\">IceBoxAdmin</span>\n###### <span id=\"IceBoxAdmin.ServiceManager.Proxy\">IceBoxAdmin.ServiceManager.Proxy</span>,格式:\n\tIceBoxAdmin.ServiceManager.Proxy=proxy\n此属性配置代理,由iceboxadmin效用用于定位服务管理。\n\n---\n[返回目录](#目录)\n## <span id=\"IceDiscovery\">IceDiscovery</span>\n###### <span id=\"IceDiscovery.Address\">IceDiscovery.Address</span>,格式:\n\tIceDiscovery.Address=addr\n指定用于发送或接收组播请求的组播地址。如果没有定义,默认值取决于其他属性设置:\n\n* 如果[Ice.PreferIPv6Address](#Ice.PreferIPv6Address)启动或Ice.IPv4关闭, IceDiscovery使用ff15::1的IPv6地址\n* 其他情况则IceDiscovery使用239.255.0.1\n\n###### <span id=\"IceDiscovery.DomainId\">IceDiscovery.DomainId</span>,格式:\n\tIceDiscovery.DomainId=id\n指定用于定位对象和对象适配器的域标识。IceDiscovery插件只响应来自具有相同域ID的客户端(client)请求和忽略来自有不同域ID的客户端(client)请求。如果没有定义,默认的域ID是一个空字符串。\n\n###### <span id=\"IceDiscovery.Interface\">IceDiscovery.Interface</span>,格式:\n\tIceDiscovery.Interface=intf\n指定用于发送和接收组播请求的IP地址接口。如果没有定义,操作系统将选择一个默认接口发送和接收组播UDP数据报。\n\n###### <span id=\"IceDiscovery.Locator.AdapterProperty\">IceDiscovery.Locator.AdapterProperty</span>,格式:\n\tIceDiscovery.Locator.AdapterProperty=value\nIceDiscovery创建了一个名为IceDiscovery.Locator对象适配器。\n通常不需要为该对象适配器设置其他属性。\n\n###### <span id=\"IceDiscovery.Lookup\">IceDiscovery.Lookup</span>,格式:\n\tIceDiscovery.Lookup=endpoint\n指定客户端(client)用于发送发现请求的组播端点(endpoint)。如果没有定义,端点(endpoint)是由如下组成:\n\n\tudp -h addr -p port --interface intf\naddr就是IceDiscovery.Address的值,port就是IceDiscovery.Port的值,intf就是IceDiscovery.Interface的值。\n\n###### <span id=\"IceDiscovery.Multicast.AdapterProperty\">IceDiscovery.Multicast.AdapterProperty</span>,格式:\n\tIceDiscovery.Multicast.AdapterProperty=value\nIceDiscovery为接收来自客户端(client)的发现请求,创建名为IceDiscovery.Multicast的对象适配器。如果IceDiscovery.Multicast.Endpoints没有其他的定义,对象适配器的端点(endpoint)是由如下组成:\n\n\tudp -h addr -p port --interface intf\naddr就是IceDiscovery.Address的值,port就是IceDiscovery.Port的值,intf就是IceDiscovery.Interface的值。\n通常不需要为该对象适配器设置其他属性。\n\n###### <span id=\"IceDiscovery.Port\">IceDiscovery.Port</span>,格式:\n\tIceDiscovery.Port=port\n指定用于发送或接收组播请求的组播端口。如果没有设置,默认值是4061。\n\n###### <span id=\"IceDiscovery.Reply.AdapterProperty\">IceDiscovery.Reply.AdapterProperty</span>,格式:\n\tIceDiscovery.Reply.AdapterProperty=value\nIceDiscovery为接收应答组播请求,创建名为IceDiscovery.Reply的对象适配器。如果[IceDiscovery.Reply.Endpoints](#IceDiscovery.Reply.Endpoints)没有其他的定义,对象适配器的端点(endpoint)是由如下组成:\n\n\tudp --interface intf\nintf就是IceDiscovery.Interface的值。此端点(endpoint)不需要一个固定端口。\n通常不需要为该对象适配器设置其他属性。\n\n###### <span id=\"IceDiscovery.RetryCount\">IceDiscovery.RetryCount</span>,格式:\n\tIceDiscovery.RetryCount=num\n指定该插件(plug-in)重新发送UDP组播请求的最大次数。[IceDiscovery.Timeout](#IceDiscovery.Timeout)属性表明插件(plug-in)等待多久重发。默认重试次数为3。\n\n###### <span id=\"IceDiscovery.Timeout\">IceDiscovery.Timeout</span>,格式:\n\tIceDiscovery.Timeout=num\n指定等待UDP组播请求的时间间隔,单位毫秒。如果这个时间间隔内,没有服务端(server)回答,客户端(client)将重试[IceDiscovery.RetryCount](#IceDiscovery.RetryCount)指定次数的请求。默认的超时时间为300。\n\n---\n[返回目录](#目录)\n## <span id=\"IceGrid\">IceGrid</span>\n###### <span id=\"IceGrid.InstanceName\">IceGrid.InstanceName</span>,格式:\n\tIceGrid.InstanceName=name\n为IceGrid对象指定的另一个身份类别。值有:\n\n* name/AdminSessionManager\n* name/AdminSessionManager-replica\n* name/AdminSSLSessionManager\n* name/AdminSSLSessionManager-replica\n* name/NullPermissionsVerifier\n* name/NullSSLPermissionsVerifier\n* name/Locator\n* name/Query\n* name/Registry\n* 
name/Registry-replica\n* name/RegistryUserAccountMapper\n* name/RegistryUserAccountMapper-replica\n* name/SessionManager\n* name/SSLSessionManager\n默认为IceGrid。\n\n###### <span id=\"IceGrid.Node.AdapterProperty\">IceGrid.Node.AdapterProperty</span>,格式:\n\tIceGrid.Node.AdapterProperty=value\n一个IceGrid节点使用名为IceGrid.Node的注册表联系人与节点通信对象适配器。因此,适配器属性可用于配置该适配器。\n\n###### <span id=\"IceGrid.Node.AllowEndpointsOverride\">IceGrid.Node.AllowEndpointsOverride</span>,格式:\n\tIceGrid.Node.AllowEndpointsOverride=num\n如果设置为非0,一个IceGrid节点允许服务端(server)重写预先的设置,即使服务端(server)是活动的。如果由节点使用对象适配器运行refreshPublishedEndpoints来管理服务端(server),将此设置为一个非零的值是必要的。默认是0。\n\n###### <span id=\"IceGrid.Node.AllowRunningServersAsRoot\">IceGrid.Node.AllowRunningServersAsRoot</span>,格式:\n\tIceGrid.Node.AllowRunningServersAsRoot=num\n如果设置为非0,一个IceGrid节点允许服务端(server)开始由节点以超级用户权限运行。请注意,除非节点使用安全端点(endpoint),否则不得设置此属性;其他情况下,客户端(client)可以在节点的机器上启动超级用户特权的任意进程。默认是0。\n\n###### <span id=\"IceGrid.Node.CollocateRegistry\">IceGrid.Node.CollocateRegistry</span>,格式:\n\tIceGrid.Node.CollocateRegistry=num\n如果num的值被设置为大于0,节点配置IceGrid注册表。配置注册表与IceGrid注册表相同的属性,并独立于IceGrid注册表。\n\n###### <span id=\"IceGrid.Node.Data\">IceGrid.Node.Data</span>,格式:\n\tIceGrid.Node.Data=path\n定义IceGrid节点的数据目录路径。如果在该目录下没有distrib,servers和tmp subdirectories,节点会创建它们。distrib目录包含从IcePatch2服务端(server)下载的分布文件。servers目录保护每一台部署服务端(server)的配置数据。tmp subdirectories目录存放临时文件。\n\n###### <span id=\"IceGrid.Node.DisableOnFailure\">IceGrid.Node.DisableOnFailure</span>,格式:\n\tIceGrid.Node.DisableOnFailure=num\n节点考虑到服务端(server)非正常终止,如非0退出或因为SIGABRT,SIGBUS,SIGILL,SIGFPE或SIGSEGV信号的退出。如果num非0,节点标志这样的服务端(server)为禁用;禁用的服务端(server)无法启动需求。num大于0,服务端(server)在num秒内禁用;num小于0,服务端(server)将一直禁用,或指导确定可用或通过管理启动。默认值为0,在这种情况下的节点不禁用服务端(server)。\n\n###### <span id=\"IceGrid.Node.Name\">IceGrid.Node.Name</span>,格式:\n\tIceGrid.Node.Name=name\n定义IceGrid节点的名称。使用相同的注册表的所有节点必须有唯一的名称;如果有一个节点以相同的名称运行,则拒绝启动节点。此属性必须为每个节点定义。\n\n###### <span id=\"IceGrid.Node.Output\">IceGrid.Node.Output</span>,格式:\n\tIceGrid.Node.Output=path\n定义IceGrid节点的输出目录路径。如果设置,节点重定向启动的服务端(server)的stdout和stderr的输出,名为server.out和server.err 的文件会在该目录下。否则,启动服务端(server)时,所有节点进程共享的stdout和stderr。\n\n###### <span id=\"IceGrid.Node.PrintServersReady\">IceGrid.Node.PrintServersReady</span>,格式:\n\tIceGrid.Node.PrintServersReady=token\n节点管理的所有服务端(server)准备后,IceGrid节点在标准输出打印\"token ready\"。这是非常有用的脚本,希望等到所有的服务端(server)已经开始,并准备使用。\n\n###### <span id=\"IceGrid.Node.ProcessorSocketCount\">IceGrid.Node.ProcessorSocketCount</span>,格式:\n\tIceGrid.Node.ProcessorSocketCount=num\n这个属性设置了套接字处理器的数量。该值会通过套接字命令向icegridadmin节点报告。在Windows Vista(或后续版本),Windows Server 2008(或后续版本)和Linux系统上,Ice运行时会自动设置套接字处理器的数量。在其他系统上,Ice运行时不能从系统获取套接字处理器的数量,你可以使用该属性设置。\n\n###### <span id=\"IceGrid.Node.PropertiesOverride\">IceGrid.Node.PropertiesOverride</span>,格式:\n\tIceGrid.Node.PropertiesOverride=overrides\n定义重写在服务端(server)部署描述符中定义的属性的属性列表。多个用空格来分开。\n\n###### <span id=\"IceGrid.Node.RedirectErrToOut\">IceGrid.Node.RedirectErrToOut</span>,格式:\n\tIceGrid.Node.RedirectErrToOut=num\n如果num的值被设置为大于0,每个启动的服务端(server)的stderr重定向到服务端(server)的stdout。\n\n###### <span id=\"IceGrid.Node.Trace.Activator\">IceGrid.Node.Trace.Activator</span>,格式:\n\tIceGrid.Node.Trace.Activator=num\n活动器跟踪等级:\n\n* 0 默认,没有活动器跟踪\n* 1 跟踪进程的激活、终止\n* 2 像1,但更复杂,在进程激活下,跟踪进程信令和更多的诊断信息\n* 3 像2,但更复杂,在进程激活下,跟踪进程更多的诊断信息\n\n###### <span id=\"IceGrid.Node.Trace.Adapter\">IceGrid.Node.Trace.Adapter</span>,格式:\n\tIceGrid.Node.Trace.Adapter=num\n对象适配器跟踪等级:\n\n* 0 默认,没有对象适配器跟踪\n* 1 跟踪对象适配器的添加、移除\n* 2 像1,但更复杂,包括对象适配器的激活、非激活和更多的诊断信息\n* 3 
像2,但更复杂,包括对象适配器过渡状态改变\n\n###### <span id=\"IceGrid.Node.Trace.Patch\">IceGrid.Node.Trace.Patch</span>,格式:\n\tIceGrid.Node.Trace.Patch=num\n碎片跟踪等级:\n\n* 0 默认,没有碎片跟踪\n* 1 显示碎片进程的概要\n* 2 像1,但更复杂,包括下载统计数据\n* 3 像2,但更复杂,包括校验信息\n\n###### <span id=\"IceGrid.Node.Trace.Replica\">IceGrid.Node.Trace.Replica</span>,格式:\n\tIceGrid.Node.Trace.Replica=num\n副本跟踪等级:\n\n* 0 默认,没有副本跟踪\n* 1 节点和副本建会话生命周期的跟踪\n* 2 像1,但更复杂,包括会话建立尝试和失败\n* 3 像2,但更复杂,包括将活着的消息发送到副本\n\n###### <span id=\"IceGrid.Node.Trace.Server\">IceGrid.Node.Trace.Server</span>,格式:\n\tIceGrid.Node.Trace.Server=num\n服务端(server)跟踪等级:\n\n* 0 默认,没有服务端(server)跟踪\n* 1 跟踪服务端(server)的添加、移除\n* 2 像1,但更复杂,包括服务端(server)的激活和非激活,属性更新,和更多的诊断信息\n* 3 像2,但更复杂,包括服务端(server)过渡状态改变\n\n###### <span id=\"IceGrid.Node.UserAccountMapper\">IceGrid.Node.UserAccountMapper</span>,格式:\n\tIceGrid.Node.UserAccountMapper=proxy\n定义一个实现IceGrid::UserAccountMapper接口的对象的代理(proxy),用于指定用户帐号在服务端(server)下启动。IceGrid节点唤起该代理来映射会话标志和用户帐号。\n作为代理属性,您可以使用属性配置代理的其他方面。\n\n###### <span id=\"IceGrid.Node.UserAccounts\">IceGrid.Node.UserAccounts</span>,格式:\n\tIceGrid.Node.UserAccounts=file\n指定IceGrid节点用户帐户映射文件的文件名。每行必须包含文件的标识符和一个用户帐户,由空格分隔。该标识符将与客户端(client)会话标识符匹配。此用户帐户映射文件是使用该节点将会话标识符映射到用户帐户。如果IceGrid.Node.UserAccountMapper定义,则该属性会被忽略。\n\n###### <span id=\"IceGrid.Node.WaitTime\">IceGrid.Node.WaitTime</span>,格式:\n\tIceGrid.Node.WaitTime=num\n定义IceGrid等待服务端(server)激活和失活的周期,单位秒。如果一个服务端(server)被自动激活,并没有在这个时间间隔内注册它的对象适配器端点(endpoint),节点假定服务端(server)存在问题并返回一个空的端点(endpoint)给客户端(client)。如果在这一时间间隔内,服务端(server)正在优雅地关闭,且IceGrid没有发现服务端(server)失活,IceGrid会杀掉服务端(server)。默认值是60秒。\n\n###### <span id=\"IceGrid.Registry.AdminCryptPasswords\">IceGrid.Registry.AdminCryptPasswords</span>,格式:\n\tIceGrid.Registry.AdminCryptPasswords=file\n指定管理客户端(client)的IceGrid注册表访问控制列表的文件名。文件中的每一行必须包含一个用户名和一个密码,用空格隔开。密码必须是MCF编码的字符串。如果没有定义,默认是admin-passwords。如果[IceGrid.Registry.AdminPermissionsVerifier](#IceGrid.Registry.AdminPermissionsVerifier)定义,该属性会被忽略。\n\n###### <span id=\"IceGrid.Registry.AdminPermissionsVerifier\">IceGrid.Registry.AdminPermissionsVerifier</span>,格式:\n\tIceGrid.Registry.AdminPermissionsVerifier=proxy\n定义一个实现Glacier2::PermissionsVerifier接口的对象的代理(proxy),用于访问IceGrid的管理会话。IceGrid注册表调用此代理以验证每一个新的客户端(client)通过IceGrid::Registry接口创建的管理会话。\n作为代理属性,您可以使用属性配置代理的其他方面。\n\n###### <span id=\"IceGrid.Registry.AdminSessionFilters\">IceGrid.Registry.AdminSessionFilters</span>,格式:\n\tIceGrid.Registry.AdminSessionFilters=num\n此属性控制IceGrid是否为IceGrid会话管理器创建会话时建立过滤器。如果num大于0,IceGrid建立它们的过滤器,所以Glacier2限制访问通过getAdmin操作返回的IceGrid::AdminSession对象和IceGrid::Admin对象。如果num为0,IceGrid不建立过滤器,所以访问这些对象是由glacier2配置独立控制。默认是1。\n\n###### <span id=\"IceGrid.Registry.AdminSessionManager.AdapterProperty\">IceGrid.Registry.AdminSessionManager.AdapterProperty</span>,格式:\n\tIceGrid.Registry.AdminSessionManager.AdapterProperty=value\nIceGrid注册表使用名为IceGrid.Registry.AdminSessionManager的是适配器,用于对象适配器处理来自IceGrid管理会话的输入请求。因此,适配器属性可用于配置该适配器。出于安全原因,该对象适配器的定义端点(endpoint)是可选的。如果你定义的端点(endpoint),他们只能访问Glacier2路由器用于创建IceGrid管理会议。\n\n###### <span id=\"IceGrid.Registry.AdminSSLPermissionsVerifier\">IceGrid.Registry.AdminSSLPermissionsVerifier</span>,格式:\n\tIceGrid.Registry.AdminSSLPermissionsVerifier=proxy\n定义一个实现Glacier2::SSLPermissionsVerifier接口的对象的代理(proxy),用于访问IceGrid的管理会话。IceGrid注册表调用此代理以验证客户端通过IceGrid::Registry接口创建安全连接中创建的每一个新的管理会话。\n作为代理属性,您可以使用属性配置代理的其他方面。\n\n###### <span 
id=\"IceGrid.Registry.Client.AdapterProperty\">IceGrid.Registry.Client.AdapterProperty</span>,格式:\n\tIceGrid.Registry.Client.AdapterProperty=value\nIceGrid使用名为IceGrid.Registry.Client的适配器,用于对象适配器处理来自客户端(client)的输入请求。因此,适配器属性可用于配置该适配器。注意,IceGrid.Registry.Client.Endpoints控制注册表的客户端端点(endpoint)。端口4061(TCP)和4062(SSL)是专为通过互联网数字分配机构(IANA)注册表。\n\n###### <span id=\"IceGrid.Registry.CryptPasswords\">IceGrid.Registry.CryptPasswords</span>,格式:\n\tIceGrid.Registry.CryptPasswords=file\n指定IceGrid注册表访问控制列表的文件。文件中的每一行必须包含一个用户名和一个密码,用空格隔开。密码必须是使用MCF编码的字符串。默认是passwords。如果[IceGrid.Registry.PermissionsVerifier](#IceGrid.Registry.PermissionsVerifier)定义,该属性将会被忽略。\n\n###### <span id=\"IceGrid.Registry.Data\">IceGrid.Registry.Data</span>,格式:\n\tIceGrid.Registry.Data=path\n定义IceGrid注册表数据目录路径。路径中指定的目录必须已存在。\n\n###### <span id=\"IceGrid.Registry.DefaultTemplates\">IceGrid.Registry.DefaultTemplates</span>,格式:\n\tIceGrid.Registry.DefaultTemplates=path\n定义包含默认模板描述符的XML路径名。在Ice分布里,一个样本文件命名为config/templates.xml包含服务端模板,方便为Ice提供服务。\n\n###### <span id=\"IceGrid.Registry.Discovery.AdapterProperty\">IceGrid.Registry.Discovery.AdapterProperty</span>,格式:\n\tIceGrid.Registry.Discovery.AdapterProperty=value\nIceGrid注册表创建一个名为IceGrid.Registry.Discovery的对象适配器,用于接收来自客户端(client)的组播发现请求。如果[IceGrid.Registry.Discovery.Endpoints](#IceGrid.Registry.Discovery.Endpoints)没有其他的定义,此对象适配器的端点(endpoint)构成如下:\n\n\tudp -h addr -p port [--interface intf]\naddr是IceGrid.Registry.Discovery.Address的值,port是IceGrid.Registry.Discovery.Port的值,intf是IceGrid.Registry.Discovery.Interface的值。\n通常不需要为该对象适配器设置其他属性。\n\n###### <span id=\"IceGrid.Registry.Discovery.Address\">IceGrid.Registry.Discovery.Address</span>,格式:\n\tIceGrid.Registry.Discovery.Address=addr\n指定用于接收组播发现查询的组播IP地址。如果没有定义,依赖于Ice.IPv4的设定。如果启动(默认),IceDiscovery使用239.255.0.1地址,其他情况下,IceDiscovery假定应用想使用IPv6并用ff15::1地址替代。此属性是用来撰写的IceGrid.Registry.Discovery对象适配器的端点(endpoint)。\n\n###### <span id=\"IceGrid.Registry.Discovery.Enabled\">IceGrid.Registry.Discovery.Enabled</span>,格式:\n\tIceDiscovery.Enabled=num\n如果num大于0,注册表创建IceGrid.Registry.Discovery对象适配器并监听组播发现查询。默认是1,设为0则关闭组播发现。\n\n###### <span id=\"IceGrid.Registry.Discovery.Interface\">IceGrid.Registry.Discovery.Interface</span>,格式:\n\tIceGrid.Registry.Discovery.Interface=intf\n指定IP地址接口,用于接收组播发现查询。如果没有指定,操作系统会选择默认接口来发送和接收UDP组播数据包。此属性是用来撰写的IceGrid.Registry.Discovery对象适配器的端点(endpoint)。\n\n###### <span id=\"IceGrid.Registry.Discovery.Port\">IceGrid.Registry.Discovery.Port</span>,格式:\n\tIceGrid.Registry.Discovery.Port=port\n指定组播端口,用于接收组播发现查询。默认是4061。此属性是用来撰写的IceGrid.Registry.Discovery对象适配器的端点(endpoint)。\n\n###### <span id=\"IceGrid.Registry.DynamicRegistration\">IceGrid.Registry.DynamicRegistration</span>,格式:\n\tIceGrid.Registry.DynamicRegistration=num\n如果num大于0,定位注册表不请求Ice服务端(server)预先注册对象适配器和副本群,但如果他们不存在,反而会自动创建它们。如果没有定义或num是0,试图注册一个未知对象适配器或副本群适配器激活会因Ice.NotRegisteredException而失败。当adapter.AdapterId属性定义时,对象适配器注册它自己。adapter.ReplicaGroupId属性标识的副本群。\n\n###### <span id=\"IceGrid.Registry.Internal.AdapterProperty\">IceGrid.Registry.Internal.AdapterProperty</span>,格式:\n\tIceGrid.Registry.Internal.AdapterProperty=value\nIceGrid注册表使用名为IceGrid.Registry.Internal的适配器,用于对象适配器处理来自节点和从副本输入请求。因此,适配器属性可用于配置该适配器。\n\n###### <span id=\"IceGrid.Registry.NodeSessionTimeout\">IceGrid.Registry.NodeSessionTimeout</span>,格式:\n\tIceGrid.Registry.NodeSessionTimeout=num\n每一个IceGrid节点建立一个与注册表必须定期更新的会话。如果一个节点在num秒内没有刷新会话,节点的会话会被销毁和部署在该节点上的服务端对新的客户端不可用。默认是30秒。\n\n###### <span 
id=\"IceGrid.Registry.PermissionsVerifier\">IceGrid.Registry.PermissionsVerifier</span>,格式:\n\tIceGrid.Registry.PermissionsVerifier=proxy\n定义一个实现Glacier2::PermissionsVerifier接口的对象的代理(proxy),用于控制访问IceGrid会话。IceGrid注册表调用此代理以验证客户端通过IceGrid::Registry接口创建的每个新客户会话。\n作为代理属性,您可以使用属性配置代理的其他方面。\n\n###### <span id=\"IceGrid.Registry.ReplicaName\">IceGrid.Registry.ReplicaName</span>,格式:\n\tIceGrid.Registry.ReplicaName=name\n注册表副本名称。如果没有定义,默认值是Master,这是主副本保留的名称。每个注册表副本必须有一个唯一的名称。\n\n###### <span id=\"IceGrid.Registry.ReplicaSessionTimeout\">IceGrid.Registry.ReplicaSessionTimeout</span>,格式:\n\tIceGrid.Registry.ReplicaSessionTimeout=num\n每一个IceGrid注册表副本建立一个与主注册表必须定期更新的会话。如果一个副本在num秒内不刷新会话,副本的会话被摧毁,副本不再从主注册表接收复制信息。如果没有指定,默认值是30秒。\n\n###### <span id=\"IceGrid.Registry.Server.AdapterProperty\">IceGrid.Registry.Server.AdapterProperty</span>,格式:\n\tIceGrid.Registry.Server.AdapterProperty=value\nIceGrid注册表使用名为IceGrid.Registry.Server的适配器,用于对象适配器处理来自服务端的输入请求。因此,适配器属性可用于配置该适配器。\n\n###### <span id=\"IceGrid.Registry.SessionFilters\">IceGrid.Registry.SessionFilters</span>,格式:\n\tIceGrid.Registry.SessionFilters=num\n该属性控制IceGrid是否为创建与IceGrid会话管理器的会话建立过滤器。如果num大于0,IceGrid建立它们的过滤器,所以Glacier2限制访问IceGrid::Query和IceGrid::Session对象,和对象适配器的会话分配。如果num是0,IceGrid不创建过滤器,所以,对象的访问是通过Glacier2配置独立控制。默认是0。\n\n###### <span id=\"IceGrid.Registry.SessionManager.AdapterProperty\">IceGrid.Registry.SessionManager.AdapterProperty</span>,格式:\n\tIceGrid.Registry.SessionManager.AdapterProperty=value\nIceGrid注册表使用名为IceGrid.Registry.SessionManager的适配器,用于对象适配器处理来自客户端(client)会话的输入请求。因此,适配器属性可用于配置该适配器。\n出于安全原因,该对象适配器的定义端点(endpoint)是可选的。如果你定义的端点(endpoint),他们只能访问Glacier2路由器用于创建IceGrid客户端会话。\n\n###### <span id=\"IceGrid.Registry.SessionTimeout\">IceGrid.Registry.SessionTimeout</span>,格式:\n\tIceGrid.Registry.SessionTimeout=num\nIceGrid客户端或管理客户端也许通过注册表建立会话。该会话必须定期刷新。如果客户端在num秒内没有刷新会话,会话会被销毁。默认是30秒。\n\n###### <span id=\"IceGrid.Registry.SSLPermissionsVerifier\">IceGrid.Registry.SSLPermissionsVerifier</span>,格式:\n\tIceGrid.Registry.SSLPermissionsVerifier=proxy\n定义一个实现Glacier2::SSLPermissionsVerifier接口的对象的代理(proxy),用于控制访问IceGrid会话。IceGrid注册表调用此代理以验证客户端通过IceGrid::Registry接口创建安全连接中创建的每个新客户会话。\n作为代理属性,您可以使用属性配置代理的其他方面。\n\n###### <span id=\"IceGrid.Registry.Trace.Adapter\">IceGrid.Registry.Trace.Adapter</span>,格式:\n\tIceGrid.Registry.Trace.Adapter=num\n对象适配器跟踪等级:\n\n* 0 默认,没有对象适配器跟踪\n* 1 对象适配器注册、移除和自我复制\n\n###### <span id=\"IceGrid.Registry.Trace.Application\">IceGrid.Registry.Trace.Application</span>,格式:\n\tIceGrid.Registry.Trace.Application=num\n应用跟踪等级:\n\n* 0 默认,没有应用跟踪\n* 1 对象适配器添加、更新和移除\n\n###### <span id=\"IceGrid.Registry.Trace.Locator\">IceGrid.Registry.Trace.Locator</span>,格式:\n\tIceGrid.Registry.Trace.Locator=num\n定位和定位注册表跟踪等级:\n\n* 0 默认,没有定位跟踪\n* 1 跟踪定位失败的适配器和对象,和失败的注册去、适配器、端点(endpoint)\n* 2 像1,当更详细,包括注册失败的适配器端点(endpoint)\n\n###### <span id=\"IceGrid.Registry.Trace.Node\">IceGrid.Registry.Trace.Node</span>,格式:\n\tIceGrid.Registry.Trace.Node=num\n节点跟踪等级:\n\n* 0 默认,没有节点跟踪\n* 1 跟踪节点的注册、移除\n* 2 像1,当更详细,包括加载统计数据\n \n###### <span id=\"IceGrid.Registry.Trace.Object\">IceGrid.Registry.Trace.Object</span>,格式:\n\tIceGrid.Registry.Trace.Object=num\n对象跟踪等级:\n\n* 0 默认,没有对象跟踪\n* 1 跟踪节点的注册、移除\n \n###### <span id=\"IceGrid.Registry.Trace.Patch\">IceGrid.Registry.Trace.Patch</span>,格式:\n\tIceGrid.Registry.Trace.Patch=num\n碎片跟踪等级:\n\n* 0 默认,没有碎片跟踪\n* 1 显示碎片进展的概要\n\n###### <span id=\"IceGrid.Registry.Trace.Server\">IceGrid.Registry.Trace.Server</span>,格式:\n\tIceGrid.Registry.Trace.Server=num\n服务端跟踪等级:\n\n* 0 默认,没有服务端跟踪\n* 1 跟踪在注册表数据库里添加和移除的服务端\n\n###### <span 
id=\"IceGrid.Registry.Trace.Session\">IceGrid.Registry.Trace.Session</span>,格式:\n\tIceGrid.Registry.Trace.Session=num\n会话跟踪等级:\n\n* 0 默认,没有客户端或服务端会话跟踪\n* 1 跟踪客户端或服务端会话的添加、移除\n* 2 像1,但更详细,包括保持活着的消息\n\n###### <span id=\"IceGrid.Registry.UserAccounts\">IceGrid.Registry.UserAccounts</span>,格式:\n\tIceGrid.Registry.UserAccounts=file\n指定一个IceGrid注册用户帐户映射文件的文件名。文件中的每一行必须包含一个标识符和一个用户帐户,由空格分隔。该标识符将与客户端会话标识符匹配。如果节点的[IceGrid.Node.UserAccountMapper](#IceGrid.Node.UserAccountMapper)属性设置为IceGrid/RegistryUserAccountMapper代理,此用户帐户映射文件使用IceGrid节点将会话标识符映射到用户帐户。\n\n---\n[返回目录](#目录)\n## <span id=\"IceGridAdmin\">IceGridAdmin</span>\n###### <span id=\"IceGridAdmin.AuthenticateUsingSSL\">IceGridAdmin.AuthenticateUsingSSL</span>,格式:\n\tIceGridAdmin.AuthenticateUsingSSL=num\n如果num大于0,当建立与IceGrid注册表会话时,icegridadmin使用SSL认证。如果没有设定或为0时,icegridadmin使用用户名和密码认证。\n\n###### <span id=\"IceGridAdmin.Discovery.Address\">IceGridAdmin.Discovery.Address</span>,格式:\n\tIceGridAdmin.Discovery.Address=addr\n指定组播IP地址用于发送组播发现查询。如果没有定义,默认依赖于Ice.IPv4点设置,如果启动(默认),客户端使用239.255.0.1地址,其他情况下,客户端假定使用IPv6并且使用ff15::1地址替代。这个属性是用来构成IceGridAdmin.Discovery.Lookup价值。\n\n###### <span id=\"IceGridAdmin.Discovery.Interface\">IceGridAdmin.Discovery.Interface</span>,格式:\n\tIceGridAdmin.Discovery.Interface=intf\n指定IP地址接口用于发送组播发现查询。如果没有定义,系统会选择一个默认的接口发送UDP数据包。这个属性是用来构成IceGridAdmin.Discovery.Lookup价值。\n\n###### <span id=\"IceGridAdmin.Discovery.Lookup\">IceGridAdmin.Discovery.Lookup</span>,格式:\n\tIceGridAdmin.Discovery.Lookup=endpoint\n指定客户端发送组播发现查询的端点(endpoint)。如果没有设定,端点(endpoint)由以下组成:\n\n\tudp -h addr -p port [--interface intf]\naddr是IceGridAdmin.Discovery.Address的值,port是IceGridAdmin.Port的值,intf是IceGridAdmin.Discovery.Interface的值。\n\n###### <span id=\"IceGridAdmin.Discovery.Reply.AdapterProperty\">IceGridAdmin.Discovery.Reply.AdapterProperty</span>,格式:\n\tIceGridAdmin.Discovery.Reply.AdapterProperty=value\n客户端创建一个名为IceGridAdmin.Discovery.Reply对象适配器,用于接收回复的组播发现查询。如果没有定义的[IceGridAdmin.Discovery.Reply.Endpoints](#IceGridAdmin.Discovery.Reply.Endpoints),此对象适配器的端点构成如下:\n\n\tudp [--interface intf]\nintf是IceGridAdmin.Discovery.Interface的值。此端点不需要一个固定端口。\n通常不需要为该对象适配器设置其他属性。\n\n###### <span id=\"IceGridAdmin.Host\">IceGridAdmin.Host</span>,格式:\n\tIceGridAdmin.Host=host\n当同时使用IceGridAdmin.Port,icegridadmin直接连接到指定主机和端口的目标注册表。\n\n###### <span id=\"IceGridAdmin.InstanceName\">IceGridAdmin.InstanceName</span>,格式:\n\tIceGridAdmin.InstanceName=name\n当icegridadmin将要连接时,指定一个IceGrid实例的名称。\n\n###### <span id=\"IceGridAdmin.Password\">IceGridAdmin.Password</span>,格式:\n\tIceGridAdmin.Password=password\n当与IceGrid注册的会话需要认证时,指定icegridadmin的认证密码。安全的原因,你可能不喜欢在一个纯文本的配置属性定义一个密码,在这种情况下你应该忽略此属性允许icegridadmin提示交互。当IceGridAdmin.AuthenticateUsingSSL来启动使用SSL认证时,该属性会被忽略。\n\n###### <span id=\"IceGridAdmin.Port\">IceGridAdmin.Port</span>,格式:\n\tIceGridAdmin.Port=port\n当同时使用IceGridAdmin.Host,icegridadmin直接连接到指定主机和端口的目标注册表。当使用多播发现时,此属性指定用于发送多播发现查询的端口。这个属性是用来构成IceGridAdmin.Discovery.Lookup价值。默认是4061。\n\n###### <span id=\"IceGridAdmin.Replica\">IceGridAdmin.Replica</span>,格式:\n\tIceGridAdmin.Replica=name\n指定icegridadmin应该接触的注册表副本的名称。默认是Master。\n\n###### <span id=\"IceGridAdmin.Trace.Observers\">IceGridAdmin.Trace.Observers</span>,格式:\n\tIceGridAdmin.Trace.Observers=num\n如果num大于0,IceGrid图形管理客户端显示观察回调它接收从注册表跟踪信息。默认是0。\n\n###### <span id=\"IceGridAdmin.Trace.SaveToRegistry\">IceGridAdmin.Trace.SaveToRegistry</span>,格式:\n\tIceGridAdmin.Trace.SaveToRegistry=num\n如果num大于0,IceGrid图形管理客户端显示跟踪信息的修改提交到注册表。默认是0。\n\n###### <span 
id=\"IceGridAdmin.Username\">IceGridAdmin.Username</span>,格式:\n\tIceGridAdmin.Username=name\n当与IceGrid注册表会话认真时,icegridadmin应该使用指定名称。当[IceGridAdmin.AuthenticateUsingSSL](#IceGridAdmin.AuthenticateUsingSSL)来启动使用SSL认证时,该属性会被忽略。\n\n---\n[返回目录](#目录)\n## <span id=\"IceLocatorDiscovery\">IceLocatorDiscovery</span>\n###### <span id=\"IceLocatorDiscovery.Address\">IceLocatorDiscovery.Address</span>,格式:\n\tIceLocatorDiscovery.Address=addr\n指定组播IP地址,用于发送组播查询。如果没有设定,默认值依赖于其他属性设置:\n\n* 如果Ice.PreferIPv6Address开启,或Ice.IPv4关闭,IceLocatorDiscovery使用IPv6的ff15::1地址\n* 其他情况IceLocatorDiscovery使用239.255.0.1\n\n###### <span id=\"IceLocatorDiscovery.InstanceName\">IceLocatorDiscovery.InstanceName</span>,格式:\n\tIceLocatorDiscovery.InstanceName=name\n指定定位器实例名称。如果你已部署有多个不相关的使用一样的组播地址和端口的定位器,你可以定义该属性来限制你的搜索那些给定已部署的定位器实例结果范围。如果未定义,该插件通过第一个定位器的实例名称来响应查询;如果随后的查询发现具有不同实例名称的定位器,该插件会记录一个消息并忽略该结果。\n\n###### <span id=\"IceLocatorDiscovery.Interface\">IceLocatorDiscovery.Interface</span>,格式:\n\tIceLocatorDiscovery.Interface=intf\n指定接口的IP地址,用于发送组播查询。如果没有定义,系统会选用一个默认的接口发送UDP数据包。\n\n###### <span id=\"IceLocatorDiscovery.Locator.AdapterProperty\">IceLocatorDiscovery.Locator.AdapterProperty</span>,格式:\n\tIceLocatorDiscovery.Locator.AdapterProperty=value\nIceLocatorDiscovery创建一个名为IceLocatorDiscovery.Locator的对象适配器,因此,所有的对象适配器属性可以被设置。\n通常不需要为该对象适配器设置属性。\n\n###### <span id=\"IceLocatorDiscovery.Lookup\">IceLocatorDiscovery.Lookup</span>,格式:\n\tIceLocatorDiscovery.Lookup=endpoint\n指定组播端点(endpoint),客户端用来发送发现查询。如果没有定义,端点是由如下:\n\n\tudp -h addr -p port --interface intf\naddr是IceLocatorDiscovery.Address的值,port是IceLocatorDiscovery.Port的值,intf是IceLocatorDiscovery.Interface的值。\n\n###### <span id=\"IceLocatorDiscovery.Port\">IceLocatorDiscovery.Port</span>,格式:\n\tIceLocatorDiscovery.Port=port\n指定组播端口,用来发送组播查询。如果没有定义,默认是4061。\n\n###### <span id=\"IceLocatorDiscovery.Reply.AdapterProperty\">IceLocatorDiscovery.Reply.AdapterProperty</span>,格式:\n\tIceLocatorDiscovery.Reply.AdapterProperty=value\nIceLocatorDiscovery创建名为IceLocatorDiscovery.Reply的对象适配器,用于接收回复的组播发现查询。如果[IceLocatorDiscovery.Reply.Endpoints](#IceLocatorDiscovery.Reply.Endpoints)没有定义,此对象适配器的端点构成如下:\n\n\tudp --interface intf\nintf是IceLocatorDiscovery.Interface的值。\n此端点不需要一个固定端口。\n通常不需要为该对象适配器设置其他属性。\n\n###### <span id=\"IceLocatorDiscovery.RetryCount\">IceLocatorDiscovery.RetryCount</span>,格式:\n\tIceLocatorDiscovery.RetryCount=num\n指定插件重新发送UDP组播查询的最大次数。如果没有定义,默认是4061。IceLocatorDiscovery.Timeout属性指明插件等待多久重发。默认值是3。\n\n###### <span id=\"IceLocatorDiscovery.RetryDelay\">IceLocatorDiscovery.RetryDelay</span>,格式:\n\tIceLocatorDiscovery.RetryDelay=num\n如果插件没有收到任何回应后重新发送IceLocatorDiscovery.RetryCount指定的次数,在开始新一轮的查询之前插件至少等num毫秒。如果没有定义,默认值是2000。\n\n###### <span id=\"IceLocatorDiscovery.Timeout\">IceLocatorDiscovery.Timeout</span>,格式:\n\tIceLocatorDiscovery.Timeout=num\n指定的时间间隔,以毫秒为单位等待UDP组播查询回复。如果服务端在这个周期内没有回复,客户端会根据IceLocatorDiscovery.RetryCount指定的次数重新发送请求。默认值是300。\n\n---\n[返回目录](#目录)\n## <span id=\"IceMX.Metrics\">IceMX.Metrics</span>\n###### <span id=\"IceMX.Metrics.view.Accept.attribute\">IceMX.Metrics.view.Accept.attribute</span>,格式:\n\tIceMX.Metrics.view.Accept.attribute=regexp\n这个属性定义了一个规则,接受一个检测对象或基于它的一个属性值的操作的监控。如果属性符合指定的正则表达式,并且如果它满足其他接受(Accept)和拒绝(Reject)的过滤器检测对象或操作将被监控。\n例如,接受监测仪表的操作对象或从名为\"MyAdapter\"的对象适配器,你可以设置以下接受权限:\n\n\tIceMX.Metrics.MyView.Accept.parent=MyAdapter\n\n###### <span id=\"IceMX.Metrics.view.Disabled\">IceMX.Metrics.view.Disabled</span>,格式:\n\tIceMX.Metrics.view.Disabled=num\n如果num大于0,标准视图或映射被禁用。此属性对于预设视图或映射是非常有用的。视图最初可以被禁止,不会引起开销并且当需要运行时可以启动。\n\n###### <span 
id=\"IceMX.Metrics.view.GroupBy\">IceMX.Metrics.view.GroupBy</span>,格式:\n\tIceMX.Metrics.view.GroupBy=delimited attributes\n该属性定义了如何分组,以及如何创建每个度量对象的ID。分组是基于特定的属性的仪表对象或操作。例如,您可以通过操作名称或代理身份来调用度量组。所有具有相同的操作名称或代理身份的调用会使用相同的度量对象记录度量。您可以在多个属性的基础上指定几个属性。当指定GroupBy属性的值时你必须划定属性定界符。一个分隔符字符不是一个数字或字符。可用于指定此属性的值的属性在Ice手册的相关节中被定义。这里有一些例子GroupBy属性。\n\n* IceMX.Metrics.MyView.GroupBy=operation\n* IceMX.Metrics.MyView.GroupBy=identity [operation]\n* IceMX.Metrics.MyView.GroupBy=remoteHost:remotePort\n\n###### <span id=\"IceMX.Metrics.view.Reject.attribute\">IceMX.Metrics.view.Reject.attribute</span>,格式:\n\tIceMX.Metrics.view.Reject.attribute=regexp\n这个属性定义了一个规则,接受一个检测对象或基于它的一个属性的值操作的监控。如果属性符合指定的正则表达式,并且如果它满足其他接受(Accept)和拒绝(Reject)的过滤器检测对象或操作将被监控。例如,拒绝监视仪表或操作命名为\"Ice.Admin\"的对象适配器,您可以设置下列拒绝属性:\n\n* IceMX.Metrics.MyView.Reject.parent=Ice\\.Admin\n\n###### <span id=\"IceMX.Metrics.view.RetainDetached\">IceMX.Metrics.view.RetainDetached</span>,格式:\n\tIceMX.Metrics.view.RetainedDetached=num\n如果num大于0,将num个当前值为0的度量对象通过度量映射保存在内存中。如果一个固定物体或操作创建一个唯一的度量对象,最后只有num个度量对象被保存在内存中,这有效地防止不明确的内存增长。默认值是10,这意味着最多10个当前值为0的度量对象通过度量映射被保留。\n\n---\n[返回目录](#目录)\n## <span id=\"IcePatch2\">IcePatch2</span>\n###### <span id=\"IcePatch2.AdapterProperty\">IcePatch2.AdapterProperty</span>,格式:\n\tIcePatch2.AdapterProperty=value\nIcePatch2为服务端使用名为IcePatch2的适配器。因此,适配器属性可用于配置该适配器。\n\n###### <span id=\"IcePatch2.Directory\">IcePatch2.Directory</span>,格式:\n\tIcePatch2.Directory=dir\n如果没有数据目录在命令行中指定,IcePatch2服务端使用这属性确定数据。在属性也被用于IcePatch2客户端指定本地数据目录。\n\n###### <span id=\"IcePatch2.InstanceName\">IcePatch2.InstanceName</span>,格式:\n\tIcePatch2.InstanceName=name\n为已知的IcePatch2对象指定身份种类。如果定义,IcePatch2::Admin接口的身份变为name/admin和IcePatch2::FileServer接口的身份变为name/server。默认值是IcePatch2。\n\n---\n[返回目录](#目录)\n## <span id=\"IcePatch2Client\">IcePatch2Client</span>\n###### <span id=\"IcePatch2Client.ChunkSize\">IcePatch2Client.ChunkSize</span>,格式:\n\tIcePatch2Client.ChunkSize=kilobytes\nIcePatch2客户端使用该属性确定每次调用getFileCompressed取回多少KB。默认是100。\n\n###### <span id=\"IcePatch2Client.Directory\">IcePatch2Client.Directory</span>,格式:\n\tIcePatch2Client.Directory=dir\nIcePatch2客户端使用这个属性确定本地数据目录。\n\n###### <span id=\"IcePatch2Client.Proxy\">IcePatch2Client.Proxy</span>,格式:\n\tIcePatch2Client.Proxy=proxy\nIcePatch2客户端使用该属性来定位IcePatch2服务端。\n\n###### <span id=\"IcePatch2Client.Remove\">IcePatch2Client.Remove</span>,格式:\n\tIcePatch2Client.Remove=num\n这个属性确定IcePatch2客户端是否删除本地存在而服务端没有的文件。num是0或负数不删除文件。num是1,删除文件并且报出如果删除一个文件失败而导致客户端停止的原因。num是2或更大也开启删除文件,但使客户端在删除过程中忽略错误。默认是1。\n\n###### <span id=\"IcePatch2Client.Thorough\">IcePatch2Client.Thorough</span>,格式:\n\tIcePatch2Client.Thorough=num\n该属性指定IcePatch2客户端是否重新计算校验和。任何大于0的都当是true。默认是0(false)。\n\n---\n[返回目录](#目录)\n## <span id=\"IceSSL\">IceSSL</span>\n#### IceSSL Property Overview\nIceSSL使用许多相同的配置属性实现我们支持的平台。然而,有一些特定的平台或语言的属性。一些属性的象征,如果必要我们列出了支持的平台或底层SSL库的简介和提供额外的特定于平台的笔记。你会看到下面的平台,语言和SSL库的性能参考表:\n\n* SChannel (C++ on Windows)\n* SecureTransport (C++ on OS X)\n* OpenSSL (C++ on Linux)\n* Java\n* .NET\n* iOS\n\n如果没有任何限制,则所有平台都支持一个属性。\n最后,请注意,Objective-C、Python、Ruby、PHP、C++使用IceSSL,因此他们为目标平台选择合适的IceSSL的SChannel,SecureTransport或OpenSSL属性。\n\n###### <span id=\"IceSSL.Alias\">IceSSL.Alias</span>,格式:\n\tIceSSL.Alias=alias (Java)\n从[IceSSL.Keystore](#IceSSL.Keystore)指定的密匙存储中选择一个特别的证书。在认证过程中,由alias标识的证书被提交到对等请求中。\n\n###### <span id=\"IceSSL.CAs\">IceSSL.CAs</span>,格式:\n\tIceSSL.CAs=path (SChannel, SecureTransport, OpenSSL, .NET, 
iOS)\n指定含有可信任证书颁发机构证书(CAs)的文件或目录的路径名。如果您希望使用与您的平台捆绑的认证证书,不设置该属性并启动[IceSSL.UsePlatformCAs](#IceSSL.UsePlatformCAs)。\n###### 平台提醒\n###### SChannel, SecureTransport, .NET\n证书可以使用DER或PEM格式的编码。\nIceSSL试图定位到上述的path。如果给定的相对路径不存在,IceSSL也试图定位到path相对的通过[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的目录。\n###### OpenSSL\n文件必须使用PEM格式编码。如果path是一个目录,该目录必须使用OpenSSL的c_rehash工具预先准备好。\nIceSSL试图定位到上述的path。如果给定的相对路径不存在,IceSSL也试图定位到path相对的通过[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的目录。\n###### iOS\n证书必须使用DER格式的编码。\n如果[IceSSL.DefaultDir](#IceSSL.DefaultDir)也定义,在应用程序资源bundle和在文件系统为DefaultDir/path,IceSSL试图打开指定的Resources/DefaultDir/path的CA证书文件。\n如果[IceSSL.DefaultDir](#IceSSL.DefaultDir)未定义,在应用程序资源bundle和在文件系统为path,IceSSL试图打开指定的Resources/path的CA证书文件。\n如果该属性没有定义,IceSSL在用户的钥匙链和系统的钥匙链查找合适的CA证书。\n###### Java\n查看[IceSSL.Truststore](#IceSSL.Truststore)\n\n###### <span id=\"IceSSL.CertFile\">IceSSL.CertFile</span>,格式:\n\tIceSSL.CertFile=file (SecureTransport, .NET, iOS)\n\tIceSSL.CertFile=file[;file] (SChannel)\n\tIceSSL.CertFile=file[:file] (OpenSSL)\n指定包含程序的证书和相应的私钥的文件。文件名可以指定相对于定义的[IceSSL.DefaultDir](#IceSSL.DefaultDir)默认目录。\n###### 平台提醒\n###### SChannel\n文件必须使用PFX(PKCS#12)格式并且包含私钥的证书。如果需要一个密码来加载该文件,应用程序必须安装一个密码处理程序或使用[IceSSL.Password](#IceSSL.Password)提供密码,否则IceSSL将拒绝证书。\n这个属性接受RSA和DSA证书。要指定两个证书,使用平台的路径字符分隔器分隔文件名。\nIceSSL试图找到指定的文件。如果给定的路径是相对的且不存在,IceSSL也试图定位到path相对的通过[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的目录。\n###### SecureTransport\n文件必须使用PFX(PKCS#12)格式并且包含私钥的证书。如果需要一个密码来加载该文件,OS X将使用其默认的图形密码的提示,除非应用程序安装了一个密码处理程序或使用[IceSSL.Password](#IceSSL.Password)提供密码。\n###### OpenSSL\n文件必须使用PFX(PKCS#12)格式并且包含私钥的证书。如果需要一个密码来加载该文件,OpenSSL会提示在终端用户除非应用安装了一个密码处理程序或使用[IceSSL.Password](#IceSSL.Password)提供密码。\n这个属性接受RSA和DSA证书。要指定两个证书,使用平台的路径字符分隔器分隔文件名。\nIceSSL试图找到指定的文件。如果给定的路径是相对的且不存在,IceSSL也试图定位到path相对的通过[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的目录。\n###### .NET\n文件必须使用PFX(PKCS#12)格式并且包含私钥的证书。必须使用[IceSSL.Password](#IceSSL.Password)提供密码来加载文件。\nIceSSL试图找到指定的文件。如果给定的路径是相对的且不存在,IceSSL也试图定位到path相对的通过[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的目录。\n###### iOS\n文件必须使用PFX(PKCS#12)格式并且包含私钥的证书。必须使用[IceSSL.Password](#IceSSL.Password)提供密码来加载文件。证书通过[IceSSL.Keychain](#IceSSL.Keychain)属性导入到钥匙链识别。\n如果[IceSSL.DefaultDir](#IceSSL.DefaultDir)未定义,在应用程序资源bundle和在文件系统为path,IceSSL试图打开指定的Resources/path的CA证书文件。\n如果该属性没有定义,IceSSL在用户的钥匙链和系统的钥匙链查找合适的CA证书。\n###### Java\n查看[IceSSL.Keystore](#IceSSL.Keystore)。\n\n###### <span id=\"IceSSL.CertStore\">IceSSL.CertStore</span>,格式:\n\tIceSSL.CertStore=name (SChannel, .NET)\n指定证书存储区的名称,用于当经过IceSSL.FindCert时定位证书。name的合法值包含有AddressBook,AuthRoot,CertificateAuthority,Disallowed,My,Root,TrustedPeople和TrustedPublisher。你还可以使用任意值来命名。默认是My。\n\n###### <span id=\"IceSSL.CertStoreLocation\">IceSSL.CertStoreLocation</span>,格式:\n\tIceSSL.CertStoreLocation=CurrentUser|LocalMachine (SChannel, .NET)\n指定书存储区的位置,用于当经过[IceSSL.FindCert](#IceSSL.FindCert)时定位证书。默认是CurrentUser。Ice程序运行为Windows服务通常需要将此属性设置为LocalMachine。\n\n###### <span id=\"IceSSL.CertVerifier\">IceSSL.CertVerifier</span>,格式:\n\tIceSSL.CertVerifier=classname (Java, .NET)\n指定实现IceSSL.CertificateVerifier接口的Java或.NET的类名称来执行应用程序定义的证书验证。\n###### 平台提醒\n###### SChannel, SecureTransport, OpenSSL\nC++应用程序可以安装证书验证程序。\n\n###### <span id=\"IceSSL.CheckCertName\">IceSSL.CheckCertName</span>,格式:\n\tIceSSL.CheckCertName=num\n如果num大于0,IceSSL试图匹配服务器的主机名作为代理端点(endpoint)对服务器证书的主题名称通用名称指定的组件。如果没有匹配,IceSSL试图匹配主机名来反对DNS和IP地址域的服务器证书的主题备用名称。搜索不发出任何DNS查询,但简单地执行一个不区分大小写的字符串匹配。如果在代理端点(endpoint)中它的通用名称或其任何DNS或IP地址匹配主机名,服务端的证书是接受的。IceSSL 
跳过这个验证步骤,如果服务器不提供证书,或者如果代理端点(endpoint)不包括主机名和[Ice.Default.Host](#Ice.Default.Host)未定义。此属性对客户证书的服务器验证无影响。如果没有发现匹配,IceSSL中止尝试连接并引发一个异常。默认值是0。\n\n###### <span id=\"IceSSL.CheckCRL\">IceSSL.CheckCRL</span>,格式:\n\tIceSSL.CheckCRL=num (.NET)\n如果num大于0,如果对等端的证书已被吊销,IceSSL通过检查证书吊销列表(CRL)来确定。num决定价值产生的行为:\n\n* 0 关闭CRL检测\n* 1 如果证书被吊销,IceSSL终止连接,记录信息并引发异常。如果证书的吊销状态是未知的,IceSSL记录信息但采用证书。\n* 2 如果证书被吊销或吊销状态未知的,IceSSL终止连接,记录信息并引发异常。\n\n[IceSSL.Trace.Security](#IceSSL.Trace.Security)属性必须设置为非0,来看到CRL相关日志消息的。[IceSSL.CheckCRL](#IceSSL.CheckCRL)的默认值是0。\n\n###### <span id=\"IceSSL.Ciphers\">IceSSL.Ciphers</span>,格式:\n\tIceSSL.Ciphers=ciphers (SChannel, SecureTransport, OpenSSL, Java)\n指定的密码套件来IceSSL允许谈判。密码套件是一套算法,满足四个要求建立一个安全的连接:签名和认证,密钥交换,安全散列,和加密。一些算法满足多个要求,并且有很多可能的组合。\n如果未指定,该插件使用安全供应商的默认密码套件。开启[IceSSL.Trace.Security](#IceSSL.Trace.Security)和仔细审查应用程序的日志输出,以确定默认情况下启用的加密套件,或验证您的密码套件配置。\n###### 平台提醒\n###### SChannel\n此属性的值是一个空格分隔的列表可以包括下列任何:\n\n* 3DES\n* AES_128\n* AES_256\n* DES\n* RC2\n* RC4\n\n例如,下面的设置使AES密码套件:IceSSL.Ciphers=AES_128 AES_256\n###### SecureTransport\n属性值被解释为空格分隔的标记列表。该插件在出现的顺序执行的顺序,以汇编的启用密码套件列表。下面的表格描述了标记:\n\n* ALL 保留关键字,使所有支持的密码套件。如果指定了,它必须是列表中的第一个标记。使用谨慎,因为它可能使低安全性的密码套件。\n* NONE 保留关键字,禁用所有的密码套件。如果指定了,它必须是列表中的第一个标记。使用没有一个开始与一个空的密码套件的设置,然后添加你想要的那些套房。\n* NAME 包括其名称匹配NAME的密码套件。\n* !NAME 不包括其名称匹配NAME的密码套件。\n* (EXP) 包括所有的密码套件的名称包含给定的正则表达式的EXP\n* !(EXP) 不包括所有的密码套件的名称包含给定的正则表达式的EXP\n\n例如,下面的设置禁用所有密码套件除了支持256位AES加密:\n\n\tIceSSL.Ciphers=NONE (AES_256.*SHA256)\n请注意,没有给出一个警告,如果一个未被识别的密码被指定。\n###### OpenSSL\n此属性的值是通过直接的OpenSSL库,支持密码列表取决于您的安装OpenSSL编译。你可以得到一个完整的支持的密码套件使用命令OpenSSL密码列表。这个命令很可能会产生一个长长的列表。为了简化选择过程,OpenSSL支持密码几类。类和密码可以通过前缀与一个感叹号除外。特殊关键词@STRENGTH分类他们的实力,以密码列表,使SSL优先选择更安全的密码进行谈判时的密码套件。@STRENGTH关键字必须是列表中的最后一个元素。类型是:\n\n* ALL 启用所有支持的加密套件。这个类应该谨慎使用,因为它可能使低安全性的密码套件。\n* ADH 匿名密码。\n* LOW 低强度密码。\n* EXP 出口限制密码。\n\n这里是一个合理设置的例子:\n\n\tIceSSL.Ciphers=ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH\n此值不包括密码低比特强度和已知问题,并命令其余密码根据自己的实力。请注意,没有给出一个警告,如果一个未被识别的密码被指定。\n###### Java\n属性值被解释为空格分隔的标记列表。该插件在出现的顺序执行的顺序,以汇编的启用密码套件列表。下面的表格描述了标记:\n\n* ALL 保留关键字,使所有支持的密码套件。如果指定了,它必须是列表中的第一个标记。使用谨慎,因为它可能使低安全性的密码套件。\n* NONE 保留关键字,禁用所有的密码套件。如果指定了,它必须是列表中的第一个标记。使用NONE一个开始与一个空的密码套件的设置,然后添加你想要的那些套房。\n* NAME 启用密码套件匹配给定的名称。\n* !NAME 禁用密码套件匹配给定的名称。\n* (EXP) 使密码套件的名称包含正则表达式的EXP。例如,NONE (.*DH_anon.*AES.*)只选择的密码套件,使用匿名Diffie-Hellman认证与加密。\n* !(EXP) 禁用密码套件的名称包含正则表达式的EXP。例如,ALL !(.*DH_anon.*AES.*)使除了那些使用匿名Diffie-Hellman认证与加密所有的密码套件。\n\n###### <span id=\"IceSSL.DefaultDir\">IceSSL.DefaultDir</span>,格式:\n\tIceSSL.DefaultDir=path\n指定要查找证书、密钥存储和其他文件的默认目录。有关更多信息的相关属性的描述。\n\n###### <span id=\"IceSSL.DH.bits\">IceSSL.DH.bits</span>,格式:\n\tIceSSL.DH.bits=file (OpenSSL)\n指定包含Diffie-Hellman密钥长度为位参数的文件,如下面的示例所示:\n\n\tIceSSL.DH.1024=dhparams1024.pem\n如果没有指定用户定义的参数所需的密钥长度,IceSSL提供默认参数的密匙长度有512、1024、2048和4096位。参数必须使用PEM格式编码。\nIceSSL试图定位指定的文件;如果给定的路径是相对的且不存在,IceSSL也试图相对于[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的目录找。\n###### 平台提醒\n###### SChannel\n匿名Diffie-Hellman密码不支持Windows。\n###### SecureTransport\n查看IceSSL.DHParams。\n\n###### <span id=\"IceSSL.DHParams\">IceSSL.DHParams</span>,格式:\n\tIceSSL.DHParams=file (SecureTransport)\n指定包含Diffie-Hellman参数文件。参数必须使用DER格式进行编码。此属性只影响服务端(输入)连接。当通过SSL/TLS与服务端商定时,客户获得DH参数。\nIceSSL试图定位指定的文件;如果给定的路径是相对的且不存在,IceSSL也试图相对于[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的目录找。\n如果不指定此属性,OS X的过程中产生自己的Diffie-Hellman参数。在运行时计算这些参数可以占用30秒,所以我们建议提前生成和定义这个属性。\n你可以使用OpenSSL命令生成dhparam Diffie-Hellman参数。\n###### 平台提醒\n###### SChannel\n匿名Diffie-Hellman密码不支持Windows。\n###### SecureTransport\n使用IceSSL.DH.bits。\n\n###### <span 
id=\"IceSSL.EntropyDaemon\">IceSSL.EntropyDaemon</span>,格式:\n\tIceSSL.EntropyDaemon=file (OpenSSL)\n指定一个UNIX域套接字的熵收集精灵,从OpenSSL的聚熵数据初始化随机数发生器。\n\n###### <span id=\"IceSSL.FindCert\">IceSSL.FindCert</span>,格式:\n\tIceSSL.FindCert=criteria (SChannel, SecureTransport, .NET)\n建立将用于身份验证的证书集合。服务端要求身份验证的证书,因此IceSSL选取累计收集第一证书。这通常是通过[IceSSL.CertFile](#IceSSL.CertFile)证书加载,如果定义该属性。否则,IceSSL会通过[IceSSL.FindCert](#IceSSL.FindCert)选择一个确定的证书。\n###### 平台提醒\n###### SChannel, .NET\nIceSSL查询证书存储匹配的证书并将其添加到应用程序的证书集合。[IceSSL.CertStore](#IceSSL.CertStore)和[IceSSL.CertStoreLocation](#IceSSL.CertStoreLocation)的设置确定目标证书存储查询。标准的值可能是*,在该情况下,存储中的所有证书都被选中。否则,标准必须是空白分隔的一个或多个field:value对。下面是有效的字段名:\n\n* Issuer 匹配一个字符串的发行人名称。\n* IssuerDN 匹配发行人的整个区分名称。\n* Serial 匹配证书的序列号。\n* Subject 匹配一个字符串对象的名称。\n* SubjectDN 匹配主体的整个区分名称。\n* SubjectKeyId 匹配证书的主题密钥标识符。\n* Thumbprint 匹配证书的指纹。\n\n字段名称是区分大小写的。如果指定多个标准,则选择符合所有标准的证书。必须用单引号或双引号括起来以保持空格。\n###### SecureTransport\nIceSSL查询匹配的证书链并将其添加到应用程序的证书集合。IceSSL使用密匙链确定[IceSSL.Keychain](#IceSSL.Keychain),或用户的默认钥匙扣如果[IceSSL.Keychain](#IceSSL.Keychain)没有定义。标准的值必须是一个或多个由空格分隔的field:value对。下面是有效的字段名:\n\n* Label 匹配用户可见标签。\n* Serial 匹配证书的序列号。\n* Subject 匹配一个字符串对象的名称。\n* SubjectKeyId 匹配证书的主题密钥标识符。\n\n字段名称是区分大小写的。如果指定多个标准,则选择符合所有标准的证书。必须用单引号或双引号括起来以保持空格。\n###### Java\n使用IceSSL.Alias\n\n###### <span id=\"IceSSL.InitOpenSSL\">IceSSL.InitOpenSSL</span>,格式:\n\tIceSSL.InitOpenSSL=num (OpenSSL)\n指示是否应icessl OpenSSL库执行全局初始化任务。默认值是1,意思是IceSSL将初始化OpenSSL。应用程序可以设置为零,如果它希望自己执行OpenSSL初始化,当应用程序使用多个组件依赖于OpenSSL这可能是有用的。\n\n###### <span id=\"IceSSL.Keychain\">IceSSL.Keychain</span>,格式:\n\tIceSSL.Keychain=name (SecureTransport, iOS)\n指定要导入的证书来确定[IceSSL.CertFile](#IceSSL.CertFile)的密匙链的名字。如果指定了一个密码密匙链,设定[IceSSL.KeychainPassword](#IceSSL.KeychainPassword)。\n###### 平台提醒\n###### SecureTransport\n相对于当前工作目录的相对路径名称。如果指定的钥匙串文件不存在,则会创建一个新文件。如果没有定义,IceSSL使用用户的默认钥匙扣。\n###### iOS\n如果没有定义,钥匙串,命名为登录时默认使用的。请注意,此属性是唯一相关的苹果模拟器和OS X的目标。\n\n###### <span id=\"IceSSL.KeychainPassword\">IceSSL.KeychainPassword</span>,格式:\n\tIceSSL.KeychainPassword=password (SecureTransport, iOS)\n为通过[IceSSL.Keychain](#IceSSL.Keychain)辨认的密匙链指定密码。如果没有定义,IceSSL试图不用密码打开密匙链。\n###### 平台提醒\n###### SecureTransport\n如果没有定义和需要密码的密匙链,OS X系统会在一个图形化的提示用户。\n###### iOS\n此属性是唯一相关的苹果模拟器和OS X的目标。\n\n###### <span id=\"IceSSL.Keystore\">IceSSL.Keystore</span>,格式:\n\tIceSSL.Keystore=file (Java)\n指定包含证书和私钥的密钥存储文件。如果密钥存储区包含多个证书,您应该指定一个特定的用于验证身份的证书用于[IceSSL.Alias](#IceSSL.Alias)。IceSSL首先尝试打开文件作为一个类装载器资源,然后作为一个普通的文件。如果给定的路径是相对的而不存在,IceSSL也试图找到它相对于[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的默认目录。通过[IceSSL.KeystoreType](#IceSSL.KeystoreType)确定文件格式。\n如果这个属性没有定义,应用程序将无法在SSL握手中提供证书。其结果是,该应用程序可能无法协商安全的连接,或可能需要使用一个匿名密码套件。\n\n###### <span id=\"IceSSL.KeystorePassword\">IceSSL.KeystorePassword</span>,格式:\n\tIceSSL.KeystorePassword=password (Java)\n指定[IceSSL.Keystore](#IceSSL.Keystore)定义的密钥存储用于完整验证的密码。如果没有定义此属性,会跳过完整验证。\n在配置文件中使用纯文本密码是存在安全风险。\n\n###### <span id=\"IceSSL.KeystoreType\">IceSSL.KeystoreType</span>,格式:\n\tIceSSL.KeystoreType=type (Java)\n指定通过[IceSSL.Keystore](#IceSSL.Keystore)定义的密钥存储文件格式。合法的值是JKS和PKCS12。如果没有定义,JVM默认是使用(通常的JKS)。\n\n###### <span id=\"IceSSL.Password\">IceSSL.Password</span>,格式:\n\tIceSSL.Password=password\n指定要解密私钥需要的密码。\n在配置文件中使用纯文本密码是存在安全风险。\n###### 平台提醒\n###### SChannel, SecureTransport, OpenSSL\n此属性提供密码,用来保护私钥的,包含在[IceSSL.CertFile](#IceSSL.CertFile)定义的文件里。\n###### Java\n此属性提供密码,用来保护私钥的,包含在[IceSSL.Keystore](#IceSSL.Keystore)定义的密钥存储里。密钥库中的所有密钥必须使用相同的密码。\n###### .NET\n此属性提供密码,用来保护[IceSSL.CertFile](#IceSSL.CertFile)定义的文件。\n###### 
iOS\n此属性提供密码,用来保护[IceSSL.CertFile](#IceSSL.CertFile)定义的文件。\n\n###### <span id=\"IceSSL.PasswordCallback\">IceSSL.PasswordCallback</span>,格式:\n\tIceSSL.PasswordCallback=classname (Java, .NET)\n指定实现IceSSL.PasswordCallback接口的Java或.NET类名。使用一个密码回调比在一个普通的文本配置文件中指定一个密码一种更安全的选择。\n###### 平台提醒\n###### SChannel, SecureTransport, OpenSSL\n在插件里使用setPasswordPrompt方法来安装一个密码回调。\n\n###### <span id=\"IceSSL.PasswordRetryMax\">IceSSL.PasswordRetryMax</span>,格式:\n\tIceSSL.PasswordRetryMax=num (SChannel, SecureTransport, OpenSSL)\n指定输入密码时,允许用户尝试创建num个IceSSL。如果没有定义,默认值是3。\n\n###### <span id=\"IceSSL.Protocols\">IceSSL.Protocols</span>,格式:\n\tIceSSL.Protocols=list (SChannel, SecureTransport, OpenSSL, Java, .NET)\nSSL握手时指定允许的协议。合法值有SSL3,TLS1,TLS1_0(别名时TLS1),TLS1_1和TLS1_2。您还可以指定多个值,用逗号或空格分隔。如果未定义此属性,则默认设置如下:\n\n\tIceSSL.Protocols=TLS1_0, TLS1_1, TLS1_2\n在C#里,TLS1_1和TLS1_2选项是必须的,.NET4.5或更高版本。\n在Ice3.6,默认禁止SSLv3。\n###### 平台提醒\n###### SecureTransport\n使用[IceSSL.ProtocolVersionMin](#IceSSL.ProtocolVersionMin)和[IceSSL.ProtocolVersionMax](#IceSSL.ProtocolVersionMax)。\n\n###### <span id=\"IceSSL.ProtocolVersionMax\">IceSSL.ProtocolVersionMax</span>,格式:\n\tIceSSL.ProtocolVersionMax=prot (SecureTransport)\nSSL握手时指定允许最大的协议。合法值有SSL3,TLS1,TLS1_0(别名时TLS1),TLS1_1和TLS1_2。默认是TLS1_0。\n###### 平台提醒\n###### SChannel, OpenSSL, Java, .NET\n使用[IceSSL.Protocols](#IceSSL.Protocols)。\n\n###### <span id=\"IceSSL.ProtocolVersionMin\">IceSSL.ProtocolVersionMin</span>,格式:\n\tIceSSL.ProtocolVersionMin=prot (SecureTransport)\nSSL握手时指定允许最小的协议。合法值有SSL3,TLS1,TLS1_0(别名时TLS1),TLS1_1和TLS1_2。如果没有定义,平台默认使用。\n###### 平台提醒\n###### SChannel, OpenSSL, Java, .NET\n使用[IceSSL.Protocols](#IceSSL.Protocols)。\n\n###### <span id=\"IceSSL.Random\">IceSSL.Random</span>,格式:\n\tIceSSL.Random=filelist (OpenSSL, Java)\n指定在播种随机数生成器时使用的数据的一个或多个文件。文件名应该使用该平台的路径分隔符分隔。\n###### 平台提醒\n###### OpenSSL\nIceSSL试图找到每个文件指定;如果给定的路径是相对的且不存在,IceSSL也试图相对于[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的目录找。\n###### Java\nIceSSL首先尝试每个文件作为一个类装载器资源,然后作为一个普通的文件打开。如果给定的路径是相对的且不存在,IceSSL也试图相对于[IceSSL.DefaultDir](#IceSSL.DefaultDir)定义的目录找。\n\n###### <span id=\"IceSSL.Trace.Security\">IceSSL.Trace.Security</span>,格式:\n\tIceSSL.Trace.Security=num (SChannel, SecureTransport, OpenSSL, Java, .NET)\nSSL插件跟踪等级:\n\n* 0 默认,没有安全跟踪\n* 1 显示SSL连接的诊断信息。 \n\n###### <span id=\"IceSSL.TrustOnly\">IceSSL.TrustOnly</span>,格式:\n\tIceSSL.TrustOnly=ENTRY[;ENTRY;...] 
(SChannel, SecureTransport, OpenSSL, Java, .NET)\n标识的信任和不信任的同伴。这一系列的属性提供了一个额外的身份验证级别,使用证书的专有名称(DN)来决定是否接受或拒绝连接。\n根据RFC 2253规定的格式,在每个属性值的ENTRY由相对可分辨名称(RDN)组成。具体来说,该部件必须用逗号隔开,并包含逗号的任何组件必须在引号中被省略或括在引号中。例如,以下2个属性定义是等价的:\n\n\tIceSSL.TrustOnly=O=\"Acme, Inc.\",OU=Sales\n\tIceSSL.TrustOnly=O=Acme\\, Inc.,OU=\"Sales\"\n使用分号分隔多个条目中的属性:\n\t\n\tIceSSL.TrustOnly=O=Acme\\, Inc.,OU=Sales;O=Acme\\, Inc.,OU=Marketing\n默认情况下,每个条目都代表一个接受条目。一个!字符在条目的前面出现表示拒绝。属性中的项的顺序不重要。\n在SSL引擎已成功完成认证过程,IceSSL评估在试图找到入口相匹配的证书的DN相关IceSSL.TrustOnly性质。为了匹配成功,对等的DN必须包含精确匹配,在一个条目中所有的RDN元件中。一个条目可能包含许多RDN成分如你期待,这取决于你需要限制访问的严格程度。在入口的RDN组件的顺序并不重要。\n下面描述的连接语义:\n\n* IceSSL中止连接如果任何拒绝或接受项被定义和对对等端不提供证书。\n* IceSSL中止连接如果如果对等端的DN匹配任何拒绝进入条件。(即使对等端的DN也符合接受条件。这是真的)\n* IceSSL接受连接如果对等端的DN匹配任何接受进入条件,或者如果没有接受项定义。\n\n我们最初的例子限制在销售和营销部门的人:\n\t\n\tIceSSL.TrustOnly=O=Acme\\, Inc.,OU=Sales;O=Acme\\, Inc.,OU=Marketing\n如果在这些部门中拒绝访问某些个人的访问,如果它是必要的,你可以添加一个拒绝输入和重新启动程序:\n\n\tIceSSL.TrustOnly=O=Acme\\, Inc.,OU=Sales; O=Acme\\, Inc.,OU=Marketing; !O=Acme\\, Inc.,CN=John Smith\n当测试您的信任配置,你也许会发现设置IceSSL.Trace.Security为0非常有效,因为IceSSL显示DN在每个对等端建立连接时。\n这个属性影响输入和输出连接。IceSSL也支持类似的只影响输入连接或输出连接的属性。\n\n###### <span id=\"IceSSL.TrustOnly.Client\">IceSSL.TrustOnly.Client</span>,格式:\n\tIceSSL.TrustOnly.Client=ENTRY[;ENTRY;...] (SChannel, SecureTransport, OpenSSL, Java, .NET)\n\tIceSSL.TrustOnly.Client=ID (iOS)\n定义信任和不信任的对等端(客户端)连接的身份。这些条目的该属性要结合[IceSSL.TrustOnly](#IceSSL.TrustOnly)。\n###### 平台提醒\n###### iOS\n一个输出连接成功,对等端证书的主题密匙身份必须匹配确定的属性值。属性值的格式必须是一串由冒号或空格分割的十六进制,如下面的示例所示:\n\n\tC2:E8:D3:33:D7:83:99:6E:08:F7:C2:34:31:F7:1E:8E:44:87:38:57\n这仅当结合[IceSSL.CertAuthFile](#IceSSL.CertAuthFile)使用,因此这个值是特定于一个证书颁发机构。\n\n###### <span id=\"IceSSL.TrustOnly.Server\">IceSSL.TrustOnly.Server</span>,格式:\n\tIceSSL.TrustOnly.Server=ENTRY[;ENTRY;...] (SChannel, SecureTransport, OpenSSL, Java, .NET)\n定义信任和不信任的对等端(服务端)连接的身份。这些条目的该属性要结合[IceSSL.TrustOnly](#IceSSL.TrustOnly)。使用[IceSSL.TrustOnly.Server.AdapterName](#IceSSL.TrustOnly.Server.AdapterName)为一个特定的对象适配器配置信任和不信任的对等端。\n\n###### <span id=\"IceSSL.TrustOnly.Server.AdapterName\">IceSSL.TrustOnly.Server.AdapterName</span>,格式:\n\tIceSSL.TrustOnly.Server.AdapterName=ENTRY[;ENTRY;...] 
(SChannel, SecureTransport, OpenSSL, Java, .NET)\n定义信任和不信任的对等端(服务端)来连接AdapterName对象适配器。这些条目的该属性要结合[IceSSL.TrustOnly](#IceSSL.TrustOnly)和[IceSSL.TrustOnly.Server](#IceSSL.TrustOnly.Server)。\n\n###### <span id=\"IceSSL.Truststore\">IceSSL.Truststore</span>,格式:\n\tIceSSL.Truststore=file (Java)\n指定包含证书颁发机构的证书的密钥存储区文件。IceSSL首先尝试打开文件作为一个类装载器资源,然后作为一个普通的文件。如果给定的路径是相对的而不存在,IceSSL也试图找到它的位置相对于[IceSSL.DefaultDir](#IceSSL.DefaultDir)所定义的默认目录。由[IceSSL.TruststoreType](#IceSSL.TruststoreType)确定文件格式。\n如果该属性没有定义,IceSSL默认使用[IceSSL.Keystore](#IceSSL.Keystore)值。如果没有信任库指定和密钥库不包含一个有效的证书链,在SSL握手时应用程序将无法进行身份验证的证书。其结果是,应用程序可能无法协商安全的连接,或可能需要使用一个匿名密码套件。\n###### 平台提醒\n###### SChannel, SecureTransport, OpenSSL, .NET\n使用[IceSSL.CAs](#IceSSL.CAs)。\n\n###### <span id=\"IceSSL.TruststorePassword\">IceSSL.TruststorePassword</span>,格式:\n\tIceSSL.TruststorePassword=password (Java)\n指定密码用于验证完整的[IceSSL.Truststore](#IceSSL.Truststore)定义的密匙存储。如果没有定义该属性,会跳过完整检测。\n在配置文件中使用纯文本密码是存在安全风险。\n###### 平台提醒\n###### SChannel, SecureTransport, OpenSSL, .NET\n使用[IceSSL.Password](#IceSSL.Password)。\n\n###### <span id=\"IceSSL.TruststoreType\">IceSSL.TruststoreType</span>,格式:\n\tIceSSL.TruststoreType=type (Java)\n指定[IceSSL.Truststore](#IceSSL.Truststore)定义的密钥存储文件的格式。合法的值有JKS和PKCS12。默认值是JKS。\n\n###### <span id=\"IceSSL.UsePlatformCAs\">IceSSL.UsePlatformCAs</span>,格式:\n\tIceSSL.UsePlatformCAs=num\n如果num大于0,IceSSL使用平台捆绑的根证书颁发机构。如果[IceSSL.CAs](#IceSSL.CAs)定义,该属性会被忽略。默认是0。\n\n###### <span id=\"IceSSL.VerifyDepthMax\">IceSSL.VerifyDepthMax</span>,格式:\n\tIceSSL.VerifyDepthMax=num (SChannel, SecureTransport, OpenSSL, Java, .NET)\n指定一个可信任的对等端证书链的最大深度,包括对等端的证书。任何长度的链接受0值。默认值是3。\n\n###### <span id=\"IceSSL.VerifyPeer\">IceSSL.VerifyPeer</span>,格式:\n\tIceSSL.VerifyPeer=num (SChannel, SecureTransport, OpenSSL, Java, .NET)\n指定在SSL握手时使用验证要求。合法值如下所示。如果未定义此属性,则默认值为2。\n\n* 0 对于一个输出连接,客户端验证服务端的证书(如果不使用匿名密码),如果验证失败,则不中止连接。对于传入的连接,服务端不请求来自客户端的证书。\n* 1 一个输出的连接,客户端验证服务端的证书并中止连接如果验证失败。对于输入的连接,服务端请求客户端的证书和验证如果提供,如果验证失败中止连接。\n* 2 对于一个输出连接,语义是相同的值1。对于输入的连接,服务端要求客户端证书并中止连接如果验证失败。\n\n###### 平台提醒\n###### .NET\n在输出连接中该属性没有影响,由于.NET总是使用的值2。对于输入的连接,值0和值1具有相同的语义。\n\n---\n[返回目录](#目录)\n## <span id=\"IceStorm Properties\">IceStorm Properties</span>\n###### <span id=\"service.Discard.Interval\">service.Discard.Interval</span>,格式:\n\tservice.Discard.Interval=num\n一个IceStorm服务端检测到转发事件给一个订阅者不起作用时,在这点上,在尝试再转发事件之前num秒试图停止投递给订阅者。默认值是60秒。\n\n###### <span id=\"service.Election.ElectionTimeout\">service.Election.ElectionTimeout</span>,格式:\n\tservice.Election.ElectionTimeout=num\n该属性用于IceStorm副本的部署。它指定了一个协调器试图形成较大的复制组的以秒为单位的时间间隔。默认值是10。\n\n###### <span id=\"service.Election.MasterTimeout\">service.Election.MasterTimeout</span>,格式:\n\tservice.Election.MasterTimeout=num\n该属性用于IceStorm副本的部署。它指定以秒为单位的时间间隔,一个奴隶检查协调器的状态。默认值是10。\n\n###### <span id=\"service.Election.ResponseTimeout\">service.Election.ResponseTimeout</span>,格式:\n\tservice.Election.ResponseTimeout=num\n该属性用于IceStorm副本的部署。它指定以秒为单位的时间间隔,一个副本等待回复的邀请,形成一个较大的组。较低优先级的副本等待时间间隔成反比的最大优先级:\n\n\tResponseTimeout + ResponseTimeout * (max - pri)\n默认值是10。\n\n###### <span id=\"service.Flush.Timeout\">service.Flush.Timeout</span>,格式:\n\tservice.Flush.Timeout=num\n定义事件被发送到批处理用户的以毫秒为单位的时间间隔。默认值是1000ms。\n\n###### <span id=\"service.InstanceName\">service.InstanceName</span>,格式:\n\tservice.InstanceName=name\n为所有通过IceStorm对象适配器主持的对象定义一个交替的身份分类。默认识别分类是IceStorm。\n\n###### <span 
id=\"service.Node.AdapterProperty\">service.Node.AdapterProperty</span>,格式:\n\tservice.Node.AdapterProperty=value\n在一个副本部署中,IceStorm为副本节点对象使用名为service.Node的适配器。因此,适配器属性可以用来配置本适配器。\n\n###### <span id=\"service.NodeId\">service.NodeId</span>,格式:\n\tservice.NodeId=value\n定义IceStorm副本的节点ID,vaule是一个非负整数。节点ID也用作副本的优先权,一个更大的值分配更高优先级的副本。具有最高优先级的副本成为其组的协调器。此属性必须定义为每个副本。\n\n###### <span id=\"service.Nodes.id\">service.Nodes.id</span>,格式:\n\tservice.Nodes.id=value\n此属性用于高可用IceStorm手动部署,每个副本必须被显式地配置为所有其他副本的代理。该值是一个给定的节点标识的副本的代理。副本对象标识的形式是实例名称/节点ID(instance-name/nodeid),如demoicestorm/NODE2。\n\n###### <span id=\"service.Publish.AdapterProperty\">service.Publish.AdapterProperty</span>,格式:\n\tservice.Publish.AdapterProperty=value\nIceStorm为处理来自发布者的输入请求的对象适配器使用名为service.Publish的适配器。因此,适配器属性可用于配置该适配器。\n\n###### <span id=\"service.ReplicatedPublishEndpoints\">service.ReplicatedPublishEndpoints</span>,格式:\n\tservice.ReplicatedPublishEndpoints=value\n此属性用于高可用IceStorm手动部署。它指定为发布服务器返回的终结点的集合,从IceStorm::Topic::getPublisher返回。如果该属性没有定义,发布者代理通过主题实例点直接返回给副本,副本变成不可用时,发布者不会透传给其他副本。\n\n###### <span id=\"service.ReplicatedTopicManagerEndpoints\">service.ReplicatedTopicManagerEndpoints</span>,格式:\n\tservice.ReplicatedTopicManagerEndpoints=value\n此属性用于高可用IceStorm手动部署。它指定用于引用副本主题的代理的一组端点。这套指标应该包含每个IceStorm副本的终点。\n如,运行IceStorm::TopicManager::create返回一个包含该组端点的代理。\n\n###### <span id=\"service.Send.Timeout\">service.Send.Timeout</span>,格式:\n\tservice.Send.Timeout=num\n当IceStorm转发事件给订阅者,申请一个发送超时。此属性的值决定了IceStorm将等待一个事件完成转发。如果一个事件在num毫秒内不能被转发,订阅者被认为是死亡,其订阅被取消。默认是60秒。设置为一个负值会禁用超时。\n\n###### <span id=\"service.TopicManager.AdapterProperty\">service.TopicManager.AdapterProperty</span>,格式:\n\tservice.TopicManager.AdapterProperty=value\nIceStorm为主体管理对象适配器使用名为service.TopicManager的适配器。因此,适配器属性可用于配置该适配器。\n\n###### <span id=\"service.Trace.Election\">service.Trace.Election</span>,格式:\n\tservice.Trace.Election=num\n与选举有关的追踪活动:\n\n* 0 默认,没有选项跟踪。\n* 1 跟踪选举活动。\n\n###### <span id=\"service.Trace.Replication\">service.Trace.Replication</span>,格式:\n\tservice.Trace.Replication=num\n跟踪与副本相关的活动:\n\n* 0 默认,没有副本跟踪。\n* 1 跟踪副本活动。\n\n###### <span id=\"service.Trace.Subscriber\">service.Trace.Subscriber</span>,格式:\n\tservice.Trace.Subscriber=num\n订阅跟踪级别:\n\n* 0 默认,没有订阅跟踪。\n* 1 在订阅和取消订阅跟踪话题的诊断信息。\n* 2 像1,当丰富,包括一个订阅状态的转换(如在一个临时网络故障后脱机,以及在成功的重试等之后再进行在线)。\n\n###### <span id=\"service.Trace.Topic\">service.Trace.Topic</span>,格式:\n\tservice.Trace.Topic=num\n主题跟踪级别:\n\n* 0 默认,没有主题跟踪。\n* 1 微量主题链接,订阅和退订。\n* 2 像1,当丰富,包括服务质量信息,和其他诊断信息。\n\n###### <span id=\"service.Trace.TopicManager\">service.Trace.TopicManager</span>,格式:\n\tservice.Trace.TopicManager=num\n主题管理器跟踪级别:\n\n* 0 默认,没有主题管理器跟踪。\n* 1 主题创建跟踪。\n\n###### <span id=\"service.Transient\">service.Transient</span>,格式:\n\tservice.Transient=num\n如果num大于0,在无需数据库下IceStorm运行在完全过渡模式。在这种模式下不支持副本。默认是0。\n\n--)-\n[返回目录](#目录)\n## <span id=\"IceStormAdmin\">IceStormAdmin</span>\n###### <span id=\"IceStormAdmin.Host\">IceStormAdmin.Host</span>,格式:\n\tIceStormAdmin.Host=host\n当IceStormAdmin.Port一起使用时,icestormadmin使用查找器接口在指定主机和端口发现话题管理器。\n如果你定义一个或多个IceStormAdmin.TopicManager属性,icestormadmin会忽略该属性。\n\n###### <span id=\"IceStormAdmin.Port\">IceStormAdmin.Port</span>,格式:\n\tIceStormAdmin.Port=port\n当IceStormAdmin.Host一起使用,icestormadmin使用查找器接口在指定主机和端口发现话题管理器。\n如果你定义一个或多个IceStormAdmin.TopicManager属性,icestormadmin会忽略该属性。\n\n###### <span 
id=\"IceStormAdmin.TopicManager.Default\">IceStormAdmin.TopicManager.Default</span>,格式:\n\tIceStormAdmin.TopicManager.Default=proxy\n为IceStorm话题管理器定义代理。该属性用于icestormadmin。IceStorm应用也许使用这个属性为其配置。\n\n###### <span id=\"IceStormAdmin.TopicManager.name\">IceStormAdmin.TopicManager.name</span>,格式:\n\tIceStormAdmin.TopicManager.name=proxy\n为icestormadmin的话题管理器定义代理。如果多个主题管理者正在使用,被icestormadmin所用。例如:\n\t\n\tIceStormAdmin.TopicManager.A=A/TopicManager:tcp -h x -p 9995\n\tIceStormAdmin.TopicManager.B=Foo/TopicManager:tcp -h x -p 9995\n\tIceStormAdmin.TopicManager.C=Bar/TopicManager:tcp -h x -p 9987\n这为三个主题管理者设置代理。请注意,name不匹配对应的主题管理器的实例名称,name只是用作标记。有了这些属性设置,icestormadmin命令接受的话题现在可以指定主题的经理不是IceStormAdmin.TopicManager.Default配置的默认话题管理器。如:\n\n\tcurrent Foo\n\tcreate myTopic\n\tcreate Bar/myOtherTopic\n为当前话题管理器设置其中一个实例名称为foo;第一个create命令在话题管理器里面创建话题,而第二个create命令使用名为Bar的话题管理器实例。" }, { "alpha_fraction": 0.7758007049560547, "alphanum_fraction": 0.7758007049560547, "avg_line_length": 19.071428298950195, "blob_id": "68a7ec86c51296063b5acb1104c31fb5f74304be", "content_id": "53502fb975e8a9d58cdf7172eaf62967d2a72f1c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 291, "license_type": "no_license", "max_line_length": 60, "num_lines": 14, "path": "/java/iceTicketProject/src/main/java/com/zzwtec/iceTicketProject/ice/service/MyTicketService.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "package com.zzwtec.iceTicketProject.ice.service;\n\nimport Ice.Object;\n\npublic class MyTicketService extends AbstractIceBoxService {\n\n\t@Override\n\tpublic Object createMyIceServiceObj(String[] args) {\n\t\t//创建servant并返回\n\t\tIce.Object object = new TicketServiceI();\n\t\treturn object;\n\t}\n\n}\n" }, { "alpha_fraction": 0.7439862489700317, "alphanum_fraction": 0.787800669670105, "avg_line_length": 31.33333396911621, "blob_id": "7be98dfa9b6197b540f9fff393a78fee8774e516", "content_id": "5959c75eb5f584a8423fec8fa2e52ad7224d0409", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1164, "license_type": "no_license", "max_line_length": 104, "num_lines": 36, "path": "/java/iceTicketProject/src/test/java/com/zzwtec/iceTicketProject/TestSpring.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "package com.zzwtec.iceTicketProject;\n\nimport java.util.List;\n\nimport junit.framework.Assert;\n\nimport org.springframework.context.support.ClassPathXmlApplicationContext;\n\nimport com.zzwtec.iceTicketProject.db.TBOrder;\nimport com.zzwtec.iceTicketProject.spring.TicketOrderServiceSpringImp;\n\npublic class TestSpring {\n\n\tpublic static void main(String[] args) {\n\t\t// TODO Auto-generated method stub\n\t\ttestAll();\n\t}\n\t\n\tpublic static void testAll(){\n\t\tClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(\"applicationContext.xml\");\n\t\tTicketOrderServiceSpringImp springImp = context.getBean(TicketOrderServiceSpringImp.class);\n\t\tTBOrder theOrder = new TBOrder();\n\t\ttheOrder.setPhone(\"13631276694\");\n\t\ttheOrder.setAmount(33.33);\n\t\ttheOrder.setOrderDate(2015120119);\n\t\ttheOrder.setOrderNum(\"201512011918001\");\n\t\tboolean success = springImp.createOrder(theOrder);\n\t\tAssert.assertEquals(true, success);\n\t\tList<TBOrder> orders = springImp.queryMyOrders(\"13631276694\");\n\t\tAssert.assertEquals(true, !orders.isEmpty());\n\t\ttheOrder = orders.iterator().next();\n\t\tsuccess = 
springImp.cancleOrder(theOrder.getId());\n\t\tAssert.assertEquals(true, success);\n\t}\n\n}\n" }, { "alpha_fraction": 0.6772366762161255, "alphanum_fraction": 0.6866742372512817, "avg_line_length": 36.30986022949219, "blob_id": "9e707d28d2844943ab167991615c93710be3db5e", "content_id": "8446131066da8c9ef709dec658f8366fc2e2adcb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5356, "license_type": "no_license", "max_line_length": 178, "num_lines": 142, "path": "/java/iceTicketProject/src/main/java/com/zzwtec/iceTicketProject/ice/glacier2/Glacier2Client.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "package com.zzwtec.iceTicketProject.ice.glacier2;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.lang.reflect.Method;\nimport java.util.Properties;\n\nimport Glacier2.SessionHelper;\nimport Glacier2.SessionNotExistException;\nimport Ice.ObjectPrx;\n\nimport com.zzwtec.ticket.ticket.Order;\nimport com.zzwtec.ticket.ticket.TicketServicePrx;\n\n/**\n * Glacier2客户端测试 <br>\n * 客户端使用SSL通过Glacier2,调用服务。<br>\n * 而服务是使用Grid\n * @author 邓燎燕\n *\n */\npublic class Glacier2Client {\n\n\tprivate static Glacier2.SessionFactoryHelper sessionFactoryHelper;\n\tpublic static Glacier2.SessionHelper _sessionHelper;\n\t\n\t@SuppressWarnings(\"rawtypes\")\n\tpublic static void connect(final Class serviceCls,final Glacier2Callback callback){\n\t\tIce.InitializationData initData = new Ice.InitializationData();\n\t\tinitData.properties = Ice.Util.createProperties();\n\t\t\n\t\ttry {\n\t\t\tFile file = new File(\"/Users/dengliaoyan/Documents/Work/Workspaces/Java_workspaces/iceTicketProject/target/classes/iceclient_glacier2.properties\");\n\t\t\tFileInputStream inputStream = new FileInputStream(file);\n Properties properties = new Properties();\n properties.load(inputStream);\n for (String name : properties.stringPropertyNames()) {\n \tif(name.equals(\"idleTimeOutSeconds\")){\n \t\tcontinue;\n \t}\n String value = properties.getProperty(name);\n initData.properties.setProperty(name, value);\n }\n\n// initData.properties.setProperty(\"Ice.RetryIntervals\", \"-1\");\n// initData.properties.setProperty(\"Ice.Trace.Network\", \"0\");\n// initData.properties.setProperty(\"Ice.Plugin.IceSSL\", \"IceSSL.PluginFactory\");\n// \n// if(initData.properties.getPropertyAsIntWithDefault(\"IceSSL.UsePlatformCAs\", 0) == 0) {\n// initData.properties.setProperty(\"Ice.InitPlugins\", \"0\");\n// \tinitData.properties.setProperty(\"IceSSL.VerifyPeer\", \"0\");\n// initData.properties.setProperty(\"IceSSL.Trace.Security\", \"1\");\n// initData.properties.setProperty(\"IceSSL.KeystoreType\", \"JKS\");\n// initData.properties.setProperty(\"IceSSL.Password\", \"123456\");\n// }\n \n } catch(IOException ex) {\n ex.printStackTrace();\n }\n\t\t\n\t\tsessionFactoryHelper = new Glacier2.SessionFactoryHelper(initData, new Glacier2.SessionCallback(){\n\n\t\t\t@Override\n\t\t\tpublic void connectFailed(SessionHelper arg0, Throwable ex) {\n\t\t\t\tSystem.out.println(\"sessionHelper connectFailed\");\n\t\t\t\tex.printStackTrace();\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic void connected(SessionHelper sessionHelper) throws SessionNotExistException {\n\t\t\t\tSystem.out.println(\"sessionHelper connected\");\n\t\t\t\tString clsName = serviceCls.getName();\n\t\t\t\tSystem.out.println(\"clsName:\"+clsName);\n\t\t\t\tString serviceName = serviceCls.getSimpleName();\n\t\t\t\tint pos = 
serviceName.lastIndexOf(\"Prx\");\n\t\t\t\tif(pos <= 0){\n\t\t\t\t\tthrow new java.lang.IllegalArgumentException(\"Invalid ObjectPrx class ,class name must end with Prx\");\n\t\t\t\t}\n\t\t\t\tString realSvName = serviceName.substring(0,pos);\n\t\t\t\tSystem.out.println(\"realSvName:\"+realSvName);\n\t\t\t\ttry {\n\t\t\t\t\tIce.ObjectPrx base = _sessionHelper.communicator().stringToProxy(realSvName);\n\t\t\t\t\t\n\t\t\t\t\tObjectPrx proxy = (ObjectPrx) Class.forName(clsName+\"Helper\").newInstance();\n\t\t\t\t\tMethod m1 = proxy.getClass().getDeclaredMethod(\"uncheckedCast\", ObjectPrx.class);\n\t\t\t\t\tproxy = (ObjectPrx)m1.invoke(proxy, base);\n\t\t\t\t\t\n\t\t\t\t\tcallback.callback(proxy);\n\t\t\t\t\t\n\t\t\t\t\tSystem.out.println(\"=========================\");\n\t\t\t\t\t\n\t\t\t\t\t_sessionHelper.destroy();\n\t\t\t\t} catch(Exception e){\n\t\t\t\t\te.printStackTrace();\n\t\t\t\t\tthrow new RuntimeException(e);\n\t\t\t\t}\n\t\t\t\t\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic void createdCommunicator(SessionHelper sessionHelper) {\n\t\t\t\tSystem.out.println(\"sessionHelper createdCommunicator\");\n//\t\t\t\tIce.Communicator communicator = sessionHelper.communicator();\n//\t\t\t\tif(communicator.getProperties().getPropertyAsIntWithDefault(\"IceSSL.UsePlatformCAs\", 0) == 0) {\n//\t\t\t\t\ttry {\n//\t\t\t\t\t\tjava.io.InputStream certStream = new FileInputStream(new File(\"/Users/dengliaoyan/Documents/Work/Workspaces/Java_workspaces/iceTicketProject/target/classes/client.jks\"));\n//\t\t\t\t\t\tIceSSL.Plugin plugin = (IceSSL.Plugin)communicator.getPluginManager().getPlugin(\"IceSSL\");\n//\t\t\t\t\t\tplugin.setKeystoreStream(certStream);\n// communicator.getPluginManager().initializePlugins();\n//\t\t\t\t\t} catch (Exception e) {\n//\t\t\t\t\t\te.printStackTrace();\n//\t\t\t\t\t}\n//\t\t\t\t}\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic void disconnected(SessionHelper sessionHelper) {\n\t\t\t\tSystem.out.println(\"sessionHelper disconnected\");\n\t\t\t}});\n\t\t_sessionHelper = sessionFactoryHelper.connect(\"dly\", \"123456\");\n\t}\n\t\n\t//测试\n\tpublic static void main(String[] s) throws InterruptedException{\n\t\tGlacier2Callback callback = new Glacier2Callback() {\n\t\t\t@Override\n\t\t\tpublic void callback(ObjectPrx proxy) {\n\t\t\t\tTicketServicePrx ticketService = (TicketServicePrx)proxy;\n\t\t\t\tOrder[] orders = ticketService.queryMyOrders(\"13631276694\");\n\t\t\t\tif(orders!=null){\n\t\t\t\t\tSystem.out.println(\"orders.length:\"+orders.length);\n\t\t\t\t}else{\n\t\t\t\t\tSystem.out.println(\"orders is null\");\n\t\t\t\t}\n\t\t\t}\n\t\t};\n\t\t\n\t\tGlacier2Client.connect(TicketServicePrx.class,callback);\n\t\tThread.sleep(2000);\n\t}\n}\n" }, { "alpha_fraction": 0.5317131876945496, "alphanum_fraction": 0.5329941511154175, "avg_line_length": 36.948612213134766, "blob_id": "0ca0e5b7b6ec551c2bbe01f56f094739a57ef326", "content_id": "a93f331bb23422c72d1e9cd748426175441975a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 27495, "license_type": "no_license", "max_line_length": 153, "num_lines": 720, "path": "/java/iceTicketProject/generated/com/zzwtec/ticket/ticket/TicketServicePrxHelper.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "// **********************************************************************\n//\n// Copyright (c) 2003-2015 ZeroC, Inc. 
All rights reserved.\n//\n// This copy of Ice is licensed to you under the terms described in the\n// ICE_LICENSE file included in this distribution.\n//\n// **********************************************************************\n//\n// Ice version 3.6.1\n//\n// <auto-generated>\n//\n// Generated from file `TicketServer.ice'\n//\n// Warning: do not edit this file.\n//\n// </auto-generated>\n//\n\npackage com.zzwtec.ticket.ticket;\n\n/**\n * 票务服务接口\n **/\npublic final class TicketServicePrxHelper extends Ice.ObjectPrxHelperBase implements TicketServicePrx\n{\n private static final String __cancleOrder_name = \"cancleOrder\";\n\n /**\n * 取消订单\n **/\n public boolean cancleOrder(long orderId)\n {\n return cancleOrder(orderId, null, false);\n }\n\n /**\n * 取消订单\n * @param __ctx The Context map to send with the invocation.\n **/\n public boolean cancleOrder(long orderId, java.util.Map<String, String> __ctx)\n {\n return cancleOrder(orderId, __ctx, true);\n }\n\n private boolean cancleOrder(long orderId, java.util.Map<String, String> __ctx, boolean __explicitCtx)\n {\n __checkTwowayOnly(__cancleOrder_name);\n return end_cancleOrder(begin_cancleOrder(orderId, __ctx, __explicitCtx, true, null));\n }\n\n /**\n * 取消订单\n * @param __cb The callback object for the operation.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId)\n {\n return begin_cancleOrder(orderId, null, false, false, null);\n }\n\n /**\n * 取消订单\n * @param __cb The callback object for the operation.\n * @param __ctx The Context map to send with the invocation.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, java.util.Map<String, String> __ctx)\n {\n return begin_cancleOrder(orderId, __ctx, true, false, null);\n }\n\n /**\n * 取消订单\n * @param __cb The callback object for the operation.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, Ice.Callback __cb)\n {\n return begin_cancleOrder(orderId, null, false, false, __cb);\n }\n\n /**\n * 取消订单\n * @param __cb The callback object for the operation.\n * @param __ctx The Context map to send with the invocation.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, java.util.Map<String, String> __ctx, Ice.Callback __cb)\n {\n return begin_cancleOrder(orderId, __ctx, true, false, __cb);\n }\n\n /**\n * 取消订单\n * @param __cb The callback object for the operation.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, Callback_TicketService_cancleOrder __cb)\n {\n return begin_cancleOrder(orderId, null, false, false, __cb);\n }\n\n /**\n * 取消订单\n * @param __cb The callback object for the operation.\n * @param __ctx The Context map to send with the invocation.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, java.util.Map<String, String> __ctx, Callback_TicketService_cancleOrder __cb)\n {\n return begin_cancleOrder(orderId, __ctx, true, false, __cb);\n }\n\n public Ice.AsyncResult begin_cancleOrder(long orderId, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb)\n {\n return begin_cancleOrder(orderId, null, false, false, __responseCb, __exceptionCb, null);\n }\n\n public Ice.AsyncResult begin_cancleOrder(long orderId, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb)\n {\n return begin_cancleOrder(orderId, null, false, false, __responseCb, __exceptionCb, __sentCb);\n }\n\n public Ice.AsyncResult begin_cancleOrder(long orderId, \n 
java.util.Map<String, String> __ctx, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb)\n {\n return begin_cancleOrder(orderId, __ctx, true, false, __responseCb, __exceptionCb, null);\n }\n\n public Ice.AsyncResult begin_cancleOrder(long orderId, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb)\n {\n return begin_cancleOrder(orderId, __ctx, true, false, __responseCb, __exceptionCb, __sentCb);\n }\n\n private Ice.AsyncResult begin_cancleOrder(long orderId, \n java.util.Map<String, String> __ctx, \n boolean __explicitCtx, \n boolean __synchronous, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb)\n {\n return begin_cancleOrder(orderId, __ctx, __explicitCtx, __synchronous, \n new IceInternal.Functional_TwowayCallbackBool(__responseCb, __exceptionCb, __sentCb)\n {\n public final void __completed(Ice.AsyncResult __result)\n {\n TicketServicePrxHelper.__cancleOrder_completed(this, __result);\n }\n });\n }\n\n private Ice.AsyncResult begin_cancleOrder(long orderId, \n java.util.Map<String, String> __ctx, \n boolean __explicitCtx, \n boolean __synchronous, \n IceInternal.CallbackBase __cb)\n {\n __checkAsyncTwowayOnly(__cancleOrder_name);\n IceInternal.OutgoingAsync __result = getOutgoingAsync(__cancleOrder_name, __cb);\n try\n {\n __result.prepare(__cancleOrder_name, Ice.OperationMode.Normal, __ctx, __explicitCtx, __synchronous);\n IceInternal.BasicStream __os = __result.startWriteParams(Ice.FormatType.DefaultFormat);\n __os.writeLong(orderId);\n __result.endWriteParams();\n __result.invoke();\n }\n catch(Ice.Exception __ex)\n {\n __result.abort(__ex);\n }\n return __result;\n }\n\n /**\n * ice_response indicates that\n * the operation completed successfully.\n **/\n public boolean end_cancleOrder(Ice.AsyncResult __iresult)\n {\n IceInternal.OutgoingAsync __result = IceInternal.OutgoingAsync.check(__iresult, this, __cancleOrder_name);\n try\n {\n if(!__result.__wait())\n {\n try\n {\n __result.throwUserException();\n }\n catch(Ice.UserException __ex)\n {\n throw new Ice.UnknownUserException(__ex.ice_name(), __ex);\n }\n }\n IceInternal.BasicStream __is = __result.startReadParams();\n boolean __ret;\n __ret = __is.readBool();\n __result.endReadParams();\n return __ret;\n }\n finally\n {\n if(__result != null)\n {\n __result.cacheMessageBuffers();\n }\n }\n }\n\n static public void __cancleOrder_completed(Ice.TwowayCallbackBool __cb, Ice.AsyncResult __result)\n {\n com.zzwtec.ticket.ticket.TicketServicePrx __proxy = (com.zzwtec.ticket.ticket.TicketServicePrx)__result.getProxy();\n boolean __ret = false;\n try\n {\n __ret = __proxy.end_cancleOrder(__result);\n }\n catch(Ice.LocalException __ex)\n {\n __cb.exception(__ex);\n return;\n }\n catch(Ice.SystemException __ex)\n {\n __cb.exception(__ex);\n return;\n }\n __cb.response(__ret);\n }\n\n private static final String __createOrder_name = \"createOrder\";\n\n /**\n * 下单\n **/\n public boolean createOrder(Order myOrder)\n {\n return createOrder(myOrder, null, false);\n }\n\n /**\n * 下单\n * @param __ctx The Context map to send with the invocation.\n **/\n public boolean createOrder(Order myOrder, java.util.Map<String, String> __ctx)\n {\n return createOrder(myOrder, __ctx, true);\n }\n\n 
private boolean createOrder(Order myOrder, java.util.Map<String, String> __ctx, boolean __explicitCtx)\n {\n __checkTwowayOnly(__createOrder_name);\n return end_createOrder(begin_createOrder(myOrder, __ctx, __explicitCtx, true, null));\n }\n\n /**\n * 下单\n * @param __cb The callback object for the operation.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder)\n {\n return begin_createOrder(myOrder, null, false, false, null);\n }\n\n /**\n * 下单\n * @param __cb The callback object for the operation.\n * @param __ctx The Context map to send with the invocation.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, java.util.Map<String, String> __ctx)\n {\n return begin_createOrder(myOrder, __ctx, true, false, null);\n }\n\n /**\n * 下单\n * @param __cb The callback object for the operation.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, Ice.Callback __cb)\n {\n return begin_createOrder(myOrder, null, false, false, __cb);\n }\n\n /**\n * 下单\n * @param __cb The callback object for the operation.\n * @param __ctx The Context map to send with the invocation.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, java.util.Map<String, String> __ctx, Ice.Callback __cb)\n {\n return begin_createOrder(myOrder, __ctx, true, false, __cb);\n }\n\n /**\n * 下单\n * @param __cb The callback object for the operation.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, Callback_TicketService_createOrder __cb)\n {\n return begin_createOrder(myOrder, null, false, false, __cb);\n }\n\n /**\n * 下单\n * @param __cb The callback object for the operation.\n * @param __ctx The Context map to send with the invocation.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, java.util.Map<String, String> __ctx, Callback_TicketService_createOrder __cb)\n {\n return begin_createOrder(myOrder, __ctx, true, false, __cb);\n }\n\n public Ice.AsyncResult begin_createOrder(Order myOrder, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb)\n {\n return begin_createOrder(myOrder, null, false, false, __responseCb, __exceptionCb, null);\n }\n\n public Ice.AsyncResult begin_createOrder(Order myOrder, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb)\n {\n return begin_createOrder(myOrder, null, false, false, __responseCb, __exceptionCb, __sentCb);\n }\n\n public Ice.AsyncResult begin_createOrder(Order myOrder, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb)\n {\n return begin_createOrder(myOrder, __ctx, true, false, __responseCb, __exceptionCb, null);\n }\n\n public Ice.AsyncResult begin_createOrder(Order myOrder, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb)\n {\n return begin_createOrder(myOrder, __ctx, true, false, __responseCb, __exceptionCb, __sentCb);\n }\n\n private Ice.AsyncResult begin_createOrder(Order myOrder, \n java.util.Map<String, String> __ctx, \n boolean __explicitCtx, \n boolean __synchronous, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb)\n {\n 
return begin_createOrder(myOrder, __ctx, __explicitCtx, __synchronous, \n new IceInternal.Functional_TwowayCallbackBool(__responseCb, __exceptionCb, __sentCb)\n {\n public final void __completed(Ice.AsyncResult __result)\n {\n TicketServicePrxHelper.__createOrder_completed(this, __result);\n }\n });\n }\n\n private Ice.AsyncResult begin_createOrder(Order myOrder, \n java.util.Map<String, String> __ctx, \n boolean __explicitCtx, \n boolean __synchronous, \n IceInternal.CallbackBase __cb)\n {\n __checkAsyncTwowayOnly(__createOrder_name);\n IceInternal.OutgoingAsync __result = getOutgoingAsync(__createOrder_name, __cb);\n try\n {\n __result.prepare(__createOrder_name, Ice.OperationMode.Normal, __ctx, __explicitCtx, __synchronous);\n IceInternal.BasicStream __os = __result.startWriteParams(Ice.FormatType.DefaultFormat);\n Order.__write(__os, myOrder);\n __result.endWriteParams();\n __result.invoke();\n }\n catch(Ice.Exception __ex)\n {\n __result.abort(__ex);\n }\n return __result;\n }\n\n /**\n * ice_response indicates that\n * the operation completed successfully.\n **/\n public boolean end_createOrder(Ice.AsyncResult __iresult)\n {\n IceInternal.OutgoingAsync __result = IceInternal.OutgoingAsync.check(__iresult, this, __createOrder_name);\n try\n {\n if(!__result.__wait())\n {\n try\n {\n __result.throwUserException();\n }\n catch(Ice.UserException __ex)\n {\n throw new Ice.UnknownUserException(__ex.ice_name(), __ex);\n }\n }\n IceInternal.BasicStream __is = __result.startReadParams();\n boolean __ret;\n __ret = __is.readBool();\n __result.endReadParams();\n return __ret;\n }\n finally\n {\n if(__result != null)\n {\n __result.cacheMessageBuffers();\n }\n }\n }\n\n static public void __createOrder_completed(Ice.TwowayCallbackBool __cb, Ice.AsyncResult __result)\n {\n com.zzwtec.ticket.ticket.TicketServicePrx __proxy = (com.zzwtec.ticket.ticket.TicketServicePrx)__result.getProxy();\n boolean __ret = false;\n try\n {\n __ret = __proxy.end_createOrder(__result);\n }\n catch(Ice.LocalException __ex)\n {\n __cb.exception(__ex);\n return;\n }\n catch(Ice.SystemException __ex)\n {\n __cb.exception(__ex);\n return;\n }\n __cb.response(__ret);\n }\n\n private static final String __queryMyOrders_name = \"queryMyOrders\";\n\n /**\n * 查询订单\n **/\n public Order[] queryMyOrders(String phone)\n {\n return queryMyOrders(phone, null, false);\n }\n\n /**\n * 查询订单\n * @param __ctx The Context map to send with the invocation.\n **/\n public Order[] queryMyOrders(String phone, java.util.Map<String, String> __ctx)\n {\n return queryMyOrders(phone, __ctx, true);\n }\n\n private Order[] queryMyOrders(String phone, java.util.Map<String, String> __ctx, boolean __explicitCtx)\n {\n __checkTwowayOnly(__queryMyOrders_name);\n return end_queryMyOrders(begin_queryMyOrders(phone, __ctx, __explicitCtx, true, null));\n }\n\n /**\n * 查询订单\n * @param __cb The callback object for the operation.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone)\n {\n return begin_queryMyOrders(phone, null, false, false, null);\n }\n\n /**\n * 查询订单\n * @param __cb The callback object for the operation.\n * @param __ctx The Context map to send with the invocation.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, java.util.Map<String, String> __ctx)\n {\n return begin_queryMyOrders(phone, __ctx, true, false, null);\n }\n\n /**\n * 查询订单\n * @param __cb The callback object for the operation.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, Ice.Callback __cb)\n {\n return begin_queryMyOrders(phone, null, 
false, false, __cb);\n }\n\n /**\n * 查询订单\n * @param __cb The callback object for the operation.\n * @param __ctx The Context map to send with the invocation.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, java.util.Map<String, String> __ctx, Ice.Callback __cb)\n {\n return begin_queryMyOrders(phone, __ctx, true, false, __cb);\n }\n\n /**\n * 查询订单\n * @param __cb The callback object for the operation.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, Callback_TicketService_queryMyOrders __cb)\n {\n return begin_queryMyOrders(phone, null, false, false, __cb);\n }\n\n /**\n * 查询订单\n * @param __cb The callback object for the operation.\n * @param __ctx The Context map to send with the invocation.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, java.util.Map<String, String> __ctx, Callback_TicketService_queryMyOrders __cb)\n {\n return begin_queryMyOrders(phone, __ctx, true, false, __cb);\n }\n\n public Ice.AsyncResult begin_queryMyOrders(String phone, \n IceInternal.Functional_GenericCallback1<Order[]> __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb)\n {\n return begin_queryMyOrders(phone, null, false, false, __responseCb, __exceptionCb, null);\n }\n\n public Ice.AsyncResult begin_queryMyOrders(String phone, \n IceInternal.Functional_GenericCallback1<Order[]> __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb)\n {\n return begin_queryMyOrders(phone, null, false, false, __responseCb, __exceptionCb, __sentCb);\n }\n\n public Ice.AsyncResult begin_queryMyOrders(String phone, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_GenericCallback1<Order[]> __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb)\n {\n return begin_queryMyOrders(phone, __ctx, true, false, __responseCb, __exceptionCb, null);\n }\n\n public Ice.AsyncResult begin_queryMyOrders(String phone, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_GenericCallback1<Order[]> __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb)\n {\n return begin_queryMyOrders(phone, __ctx, true, false, __responseCb, __exceptionCb, __sentCb);\n }\n\n private Ice.AsyncResult begin_queryMyOrders(String phone, \n java.util.Map<String, String> __ctx, \n boolean __explicitCtx, \n boolean __synchronous, \n IceInternal.Functional_GenericCallback1<Order[]> __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb)\n {\n return begin_queryMyOrders(phone, __ctx, __explicitCtx, __synchronous, \n new IceInternal.Functional_TwowayCallbackArg1<com.zzwtec.ticket.ticket.Order[]>(__responseCb, __exceptionCb, __sentCb)\n {\n public final void __completed(Ice.AsyncResult __result)\n {\n TicketServicePrxHelper.__queryMyOrders_completed(this, __result);\n }\n });\n }\n\n private Ice.AsyncResult begin_queryMyOrders(String phone, \n java.util.Map<String, String> __ctx, \n boolean __explicitCtx, \n boolean __synchronous, \n IceInternal.CallbackBase __cb)\n {\n __checkAsyncTwowayOnly(__queryMyOrders_name);\n IceInternal.OutgoingAsync __result = getOutgoingAsync(__queryMyOrders_name, __cb);\n try\n {\n __result.prepare(__queryMyOrders_name, Ice.OperationMode.Normal, __ctx, __explicitCtx, __synchronous);\n IceInternal.BasicStream __os = 
__result.startWriteParams(Ice.FormatType.DefaultFormat);\n __os.writeString(phone);\n __result.endWriteParams();\n __result.invoke();\n }\n catch(Ice.Exception __ex)\n {\n __result.abort(__ex);\n }\n return __result;\n }\n\n /**\n * ice_response indicates that\n * the operation completed successfully.\n **/\n public Order[] end_queryMyOrders(Ice.AsyncResult __iresult)\n {\n IceInternal.OutgoingAsync __result = IceInternal.OutgoingAsync.check(__iresult, this, __queryMyOrders_name);\n try\n {\n if(!__result.__wait())\n {\n try\n {\n __result.throwUserException();\n }\n catch(Ice.UserException __ex)\n {\n throw new Ice.UnknownUserException(__ex.ice_name(), __ex);\n }\n }\n IceInternal.BasicStream __is = __result.startReadParams();\n Order[] __ret;\n __ret = OrderSeqHelper.read(__is);\n __result.endReadParams();\n return __ret;\n }\n finally\n {\n if(__result != null)\n {\n __result.cacheMessageBuffers();\n }\n }\n }\n\n static public void __queryMyOrders_completed(Ice.TwowayCallbackArg1<Order[]> __cb, Ice.AsyncResult __result)\n {\n com.zzwtec.ticket.ticket.TicketServicePrx __proxy = (com.zzwtec.ticket.ticket.TicketServicePrx)__result.getProxy();\n Order[] __ret = null;\n try\n {\n __ret = __proxy.end_queryMyOrders(__result);\n }\n catch(Ice.LocalException __ex)\n {\n __cb.exception(__ex);\n return;\n }\n catch(Ice.SystemException __ex)\n {\n __cb.exception(__ex);\n return;\n }\n __cb.response(__ret);\n }\n\n public static TicketServicePrx checkedCast(Ice.ObjectPrx __obj)\n {\n return checkedCastImpl(__obj, ice_staticId(), TicketServicePrx.class, TicketServicePrxHelper.class);\n }\n\n public static TicketServicePrx checkedCast(Ice.ObjectPrx __obj, java.util.Map<String, String> __ctx)\n {\n return checkedCastImpl(__obj, __ctx, ice_staticId(), TicketServicePrx.class, TicketServicePrxHelper.class);\n }\n\n public static TicketServicePrx checkedCast(Ice.ObjectPrx __obj, String __facet)\n {\n return checkedCastImpl(__obj, __facet, ice_staticId(), TicketServicePrx.class, TicketServicePrxHelper.class);\n }\n\n public static TicketServicePrx checkedCast(Ice.ObjectPrx __obj, String __facet, java.util.Map<String, String> __ctx)\n {\n return checkedCastImpl(__obj, __facet, __ctx, ice_staticId(), TicketServicePrx.class, TicketServicePrxHelper.class);\n }\n\n public static TicketServicePrx uncheckedCast(Ice.ObjectPrx __obj)\n {\n return uncheckedCastImpl(__obj, TicketServicePrx.class, TicketServicePrxHelper.class);\n }\n\n public static TicketServicePrx uncheckedCast(Ice.ObjectPrx __obj, String __facet)\n {\n return uncheckedCastImpl(__obj, __facet, TicketServicePrx.class, TicketServicePrxHelper.class);\n }\n\n public static final String[] __ids =\n {\n \"::Ice::Object\",\n \"::ticket::TicketService\"\n };\n\n public static String ice_staticId()\n {\n return __ids[1];\n }\n\n public static void __write(IceInternal.BasicStream __os, TicketServicePrx v)\n {\n __os.writeProxy(v);\n }\n\n public static TicketServicePrx __read(IceInternal.BasicStream __is)\n {\n Ice.ObjectPrx proxy = __is.readProxy();\n if(proxy != null)\n {\n TicketServicePrxHelper result = new TicketServicePrxHelper();\n result.__copyFrom(proxy);\n return result;\n }\n return null;\n }\n\n public static final long serialVersionUID = 0L;\n}\n" }, { "alpha_fraction": 0.7701298594474792, "alphanum_fraction": 0.7733766436576843, "avg_line_length": 30.428571701049805, "blob_id": "d44f89fcd83035f5494c6af6f23344b860dc2fe1", "content_id": "85d89b2fb49eb49ff0376a1fd1c8751b572a90f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Java", "length_bytes": 1638, "license_type": "no_license", "max_line_length": 102, "num_lines": 49, "path": "/java/iceTicketProject/src/main/java/com/zzwtec/iceTicketProject/ice/service/AbstractIceBoxService.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "package com.zzwtec.iceTicketProject.ice.service;\n\nimport java.util.Arrays;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport Ice.Communicator;\nimport Ice.Identity;\nimport IceBox.Service;\n\nimport com.zzwtec.iceTicketProject.ice.interceptor.PerfDispatchInterceptor;\nimport com.zzwtec.iceTicketProject.ice.util.Sl4jLoggerI;\n\npublic abstract class AbstractIceBoxService implements Service {\n\n\tprotected Ice.ObjectAdapter _adapter;\n\tprotected Identity id;\n\tprotected static Logger logger = LoggerFactory.getLogger(AbstractIceBoxService.class);\n\tprotected static Sl4jLoggerI iceLogger = new Sl4jLoggerI(\"communicator\");\n\t\n\t@Override\n\tpublic void start(String name, Communicator communicator, String[] args) {\n\t\tIce.Util.setProcessLogger(iceLogger);\n\t\t//创建objectAdapter,这里和service同名\n\t\t_adapter = communicator.createObjectAdapter(name);\n\t\t//创建servant并激活\n\t\tIce.Object object = this.createMyIceServiceObj(args);\n\t\tid = communicator.stringToIdentity(name);\n\t\t//自动添加到服务拦截框架中\n\t\t_adapter.add(PerfDispatchInterceptor.addIceObject(id, object), id);\n\t\t_adapter.activate();\n\t\tlogger.info(name+\" service started ,with param size \"+args.length+\" detail:\"+Arrays.toString(args));\n\t}\n\t@Override\n\tpublic void stop() {\n\t\tlogger.info(\"stopping service \"+id+\" ...\");\n\t\t_adapter.destroy();\n\t\tPerfDispatchInterceptor.removeIceObject(id);\n\t\tlogger.info(\"stopped service \"+id+\" stoped\");\n\t}\n\t\n\t/**\n\t * 创建具体的Ice服务器实例对象\n\t * @param args 服务的配置参数,来自icegrid.xml文件\n\t * @return Ice.Object\n\t */\n\tpublic abstract Ice.Object createMyIceServiceObj(String[] args);\n}\n" }, { "alpha_fraction": 0.7229539155960083, "alphanum_fraction": 0.7601128816604614, "avg_line_length": 29.81884002685547, "blob_id": "a3f494ab2c2b9dea7c01e5ee175dbdd74ed9a6dc", "content_id": "034b2480dfc808b5db90ea7bfa784fc9f12724fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 5412, "license_type": "no_license", "max_line_length": 160, "num_lines": 138, "path": "/Ice分布式服务配置说明.md", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "# Registry的主从配置说明\n## Registry说明\n* 服务器A,服务器B\n* 服务器A上部署主Registry、node1\n* 服务器B上部署从Registry\n\n## 应用部署\n应用代码app部署到服务器A上\n\n## 配置说明\n1. 服务器A上的主Registry配置上添加Ice.ProgramName=Master,注释掉Ice.Admin.ServerId和Ice.Admin.InstanceName,将Ice.Default.Locator指向服务器B和本机\n2. 服务器B上的从Registry配置上添加Ice.ProgramName=Replica1、IceGrid.Registry.ReplicaName=Replica1,注释掉Ice.Admin.ServerId和Ice.Admin.InstanceName,将Ice.Default.Locator指向服务器A和本机\n3. 服务器A的node1上Ice.Default.Locator指服务器A和服务器B\n4. 在服务器A的应用代码app的grid上配置node1\n5. 客户端的Ice.Default.Locator指服务器A和服务器B,或Glaciter2的Ice.Default.Locator指服务器A和服务器B\n\n## 启动操作\n1. 启动顺序先启动服务器A的Registry、node\n2. 启动服务器B的Registry\n3. 服务器A上通过管理端添加application\n4. 启动客户端访问\n\n\n# node的分布式配置说明\n## node说明\n* 服务器A,服务器B\n* 服务器A上部署node1\n* 服务器B上部署node2\n\n## 应用部署\n应用代码app部署到服务器A和服务器B上\n\n## 配置说明\n1. node1的Registry指向服务器A\n2. node2的Registry指向服务器A\n3. 在服务器A的应用代码app的grid上配置node1和node2\n\n## 启动操作\n1. 启动顺序先启动服务器A的Registry、node\n2. 启动服务器B的node\n3. 服务器A上通过管理端添加application\n4. 
启动客户端访问\n\n\n# IceGlacier2多服务器部署\n## 说明\n* 服务器A,服务器B\n* 服务器A上部署主Registry、Glacier2_1\n* 服务器B上部署从Registry、Glacier2_2\n\n## 证书说明注意\n* 生成证书时不指定server的ip,只指定域名\n* 所有的部署Glacier2都使用一样的证书\n\n## 配置说明\n* Glacier2_1的Ice.Default.Locator指服务器A和服务器B\n* Glacier2_1的配置证书,所有的部署Glacier2都使用一样的证书\n* Glacier2_2的Ice.Default.Locator指服务器A和服务器B\n* Glacier2_2的配置证书,所有的部署Glacier2都使用一样的证书\n* 客户端的Glacier2指服务器A和服务器B的Glacier2\n\n## 作用\n配置多个Glacier2主要起到分流作用。\n\n\n# IcePatch2和IceGrid集成\n## 说明\n* 服务器A,服务器B\n* 服务器A上部署Registry、node1,创建node_patch2/data、patch2/server、patch2/client\n* 服务器B上部署node2,创建node_patch2/data、patch2/client\n\n## 配置说明\n* IceGrid的配置\n\n\t\t<server-template id=\"IcePatch2ServerTemplate\">\n\t\t\t<parameter name=\"instance-name\" default=\"${application}.IcePatch2\"/>\n\t\t\t<parameter name=\"endpoints\" default=\"default\"/>\n\t\t\t<parameter name=\"directory\"/>\n\t\t\t<server id=\"${instance-name}.server\" exe=\"icepatch2server\" application-distrib=\"false\" activation=\"on-demand\">\n\t\t\t\t<adapter name=\"IcePatch2\" endpoints=\"${endpoints}\">\n\t\t\t\t\t<object identity=\"${instance-name}/server\" type=\"::IcePatch2::FileServer\"/>\n\t\t\t\t</adapter>\n\t\t\t\t<properties>\n\t\t\t\t\t<property name=\"IcePatch2.InstanceName\" value=\"${instance-name}\"/>\n\t\t\t\t\t<property name=\"IcePatch2.Directory\" value=\"${directory}\"/>\n\t\t\t\t</properties>\n\t\t\t</server>\n\t\t</server-template>\n\t\t<server-template id=\"IcePatch2ClientTemplate\">\n\t\t\t<parameter name=\"instance-name\" default=\"${application}.IcePatch2\"/>\n\t\t\t<parameter name=\"directory\"/>\n\t\t\t<parameter name=\"nodeName\"/>\n\t\t\t<server id=\"${instance-name}.${nodeName}\" exe=\"icepatch2client\" application-distrib=\"false\" activation=\"on-demand\">\n\t\t\t\t<option>-t</option>\n\t\t\t\t<properties>\n\t\t\t\t\t<property name=\"IcePatch2.InstanceName\" value=\"${instance-name}\"/>\n\t\t\t\t\t<property name=\"IcePatch2Client.Directory\" value=\"${directory}\"/>\n\t\t\t\t\t<property name=\"IcePatch2Client.ChunkSize\" value=\"10\" />\n\t\t\t\t\t<property name=\"IcePatch2Client.Thorough\" value=\"1\" />\n\t\t\t\t\t<property name=\"IcePatch2Client.Remove\" value=\"1\" /> \n\t\t\t\t\t<property name=\"IcePatch2Client.Proxy\" value=\"${instance-name}/server:tcp -h 10.175.206.101 -p 8000\"/>\n\t\t\t\t</properties>\n\t\t\t</server>\n\t\t</server-template>\n\t\t<distrib/>\n\t\t<node name=\"node1\">\n\t\t\t<server-instance template=\"IcePatch2ServerTemplate\" directory=\"/home/Ice/patch2/server\" endpoints=\"tcp -p 8000\" />\n\t\t\t<server-instance template=\"IcePatch2ClientTemplate\" directory=\"/home/Ice/patch2/client\" nodeName=\"patch2Client_1\" />\n\t\t</node>\n\t\t<node name=\"node2\">\n\t\t\t<server-instance template=\"IcePatch2ClientTemplate\" directory=\"/home/Ice/patch2/client\" nodeName=\"patch2Client_2\" />\n\t\t</node>\n\n* node1的配置\n\n\t\tIceGrid.Node.Name=node1\n\t\tIce.Default.Locator=IceGrid/Locator:tcp -p 4061\n\t\tIceGrid.Node.Data=/home/Ice/node_patch2/data\n\t\tIce.StdErr=/home/Ice/node_patch2/node.stderr.log\n\t\tIce.StdOut=/home/Ice/node_patch2/node.stdout.log\n\t\tIceGrid.Node.Endpoints=tcp -p 5061\n\n* node2的配置\n\n\t\tIceGrid.Node.Name=node1\n\t\tIce.Default.Locator=IceGrid/Locator:tcp -h 10.175.206.101 -p 4061\n\t\tIceGrid.Node.Data=/home/Ice/node_patch2/data\n\t\tIce.StdErr=/home/Ice/node_patch2/node.stderr.log\n\t\tIce.StdOut=/home/Ice/node_patch2/node.stdout.log\n\t\tIceGrid.Node.Endpoints=tcp -p 5061\n\n## 启动操作\n1. 先启动服务器A的Registry、node1\n2. 再启动服务器B的node2\n3. 将要部署的文件上传到服务器A的patch2/server里\n4. 
通过icepatch2calc在patch2/server生成校验码\n5. 启动IcePatch2Server服务\n6. 再启动IcePatch2Client服务,当IcePatch2Client停止后应该就可以在patch2/client上看到相应的文件" }, { "alpha_fraction": 0.6206896305084229, "alphanum_fraction": 0.8160919547080994, "avg_line_length": 43, "blob_id": "1cc9923b56cb3b6f76c9f352d7d7c26f2bfd9f74", "content_id": "4dde8197428480cddcd9db231c7d4d290e962c12", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 87, "license_type": "no_license", "max_line_length": 64, "num_lines": 2, "path": "/android/IceClient/assets/iceclient.properties", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "Ice.Default.Locator=IceGrid/Locator:tcp -h 192.168.0.112 -p 4061\nidleTimeOutSeconds=300" }, { "alpha_fraction": 0.5744820833206177, "alphanum_fraction": 0.5770244598388672, "avg_line_length": 38.7752799987793, "blob_id": "200ead1f9ceb46f7d8c6acd600b1cc6f0a2d4099", "content_id": "13b20b4a7e80224dfcb796ad7d44ac54bc83c505", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 10812, "license_type": "no_license", "max_line_length": 141, "num_lines": 267, "path": "/java/iceTicketProject/generated/com/zzwtec/ticket/ticket/TicketServicePrx.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "// **********************************************************************\n//\n// Copyright (c) 2003-2015 ZeroC, Inc. All rights reserved.\n//\n// This copy of Ice is licensed to you under the terms described in the\n// ICE_LICENSE file included in this distribution.\n//\n// **********************************************************************\n//\n// Ice version 3.6.1\n//\n// <auto-generated>\n//\n// Generated from file `TicketServer.ice'\n//\n// Warning: do not edit this file.\n//\n// </auto-generated>\n//\n\npackage com.zzwtec.ticket.ticket;\n\n/**\n * 票务服务接口\n **/\npublic interface TicketServicePrx extends Ice.ObjectPrx\n{\n /**\n * 下单\n **/\n public boolean createOrder(Order myOrder);\n\n /**\n * 下单\n * @param __ctx The Context map to send with the invocation.\n **/\n public boolean createOrder(Order myOrder, java.util.Map<String, String> __ctx);\n\n /**\n * 下单\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder);\n\n /**\n * 下单\n * @param __ctx The Context map to send with the invocation.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, java.util.Map<String, String> __ctx);\n\n /**\n * 下单\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, Ice.Callback __cb);\n\n /**\n * 下单\n * @param __ctx The Context map to send with the invocation.\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, java.util.Map<String, String> __ctx, Ice.Callback __cb);\n\n /**\n * 下单\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, Callback_TicketService_createOrder __cb);\n\n /**\n * 下单\n * @param __ctx The Context map to send with the invocation.\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_createOrder(Order myOrder, java.util.Map<String, String> __ctx, Callback_TicketService_createOrder __cb);\n\n 
public Ice.AsyncResult begin_createOrder(Order myOrder, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb);\n\n public Ice.AsyncResult begin_createOrder(Order myOrder, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb);\n\n public Ice.AsyncResult begin_createOrder(Order myOrder, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb);\n\n public Ice.AsyncResult begin_createOrder(Order myOrder, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb);\n\n /**\n * 下单\n * @param __result The asynchronous result object.\n **/\n public boolean end_createOrder(Ice.AsyncResult __result);\n\n /**\n * 查询订单\n **/\n public Order[] queryMyOrders(String phone);\n\n /**\n * 查询订单\n * @param __ctx The Context map to send with the invocation.\n **/\n public Order[] queryMyOrders(String phone, java.util.Map<String, String> __ctx);\n\n /**\n * 查询订单\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone);\n\n /**\n * 查询订单\n * @param __ctx The Context map to send with the invocation.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, java.util.Map<String, String> __ctx);\n\n /**\n * 查询订单\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, Ice.Callback __cb);\n\n /**\n * 查询订单\n * @param __ctx The Context map to send with the invocation.\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, java.util.Map<String, String> __ctx, Ice.Callback __cb);\n\n /**\n * 查询订单\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, Callback_TicketService_queryMyOrders __cb);\n\n /**\n * 查询订单\n * @param __ctx The Context map to send with the invocation.\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_queryMyOrders(String phone, java.util.Map<String, String> __ctx, Callback_TicketService_queryMyOrders __cb);\n\n public Ice.AsyncResult begin_queryMyOrders(String phone, \n IceInternal.Functional_GenericCallback1<Order[]> __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb);\n\n public Ice.AsyncResult begin_queryMyOrders(String phone, \n IceInternal.Functional_GenericCallback1<Order[]> __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb);\n\n public Ice.AsyncResult begin_queryMyOrders(String phone, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_GenericCallback1<Order[]> __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb);\n\n public Ice.AsyncResult begin_queryMyOrders(String phone, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_GenericCallback1<Order[]> __responseCb, \n 
IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb);\n\n /**\n * 查询订单\n * @param __result The asynchronous result object.\n **/\n public Order[] end_queryMyOrders(Ice.AsyncResult __result);\n\n /**\n * 取消订单\n **/\n public boolean cancleOrder(long orderId);\n\n /**\n * 取消订单\n * @param __ctx The Context map to send with the invocation.\n **/\n public boolean cancleOrder(long orderId, java.util.Map<String, String> __ctx);\n\n /**\n * 取消订单\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId);\n\n /**\n * 取消订单\n * @param __ctx The Context map to send with the invocation.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, java.util.Map<String, String> __ctx);\n\n /**\n * 取消订单\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, Ice.Callback __cb);\n\n /**\n * 取消订单\n * @param __ctx The Context map to send with the invocation.\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, java.util.Map<String, String> __ctx, Ice.Callback __cb);\n\n /**\n * 取消订单\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, Callback_TicketService_cancleOrder __cb);\n\n /**\n * 取消订单\n * @param __ctx The Context map to send with the invocation.\n * @param __cb The asynchronous callback object.\n * @return The asynchronous result object.\n **/\n public Ice.AsyncResult begin_cancleOrder(long orderId, java.util.Map<String, String> __ctx, Callback_TicketService_cancleOrder __cb);\n\n public Ice.AsyncResult begin_cancleOrder(long orderId, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb);\n\n public Ice.AsyncResult begin_cancleOrder(long orderId, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb);\n\n public Ice.AsyncResult begin_cancleOrder(long orderId, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb);\n\n public Ice.AsyncResult begin_cancleOrder(long orderId, \n java.util.Map<String, String> __ctx, \n IceInternal.Functional_BoolCallback __responseCb, \n IceInternal.Functional_GenericCallback1<Ice.Exception> __exceptionCb, \n IceInternal.Functional_BoolCallback __sentCb);\n\n /**\n * 取消订单\n * @param __result The asynchronous result object.\n **/\n public boolean end_cancleOrder(Ice.AsyncResult __result);\n}\n" }, { "alpha_fraction": 0.6271374821662903, "alphanum_fraction": 0.6433488726615906, "avg_line_length": 25.96407127380371, "blob_id": "2d6daebbde77dcb618f590dd9e2e2ddf64d826cc", "content_id": "400ae60f8fd67a6620519b774a34813ab60f3961", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Maven POM", "length_bytes": 4513, "license_type": "no_license", "max_line_length": 104, "num_lines": 167, "path": "/java/iceTicketProject/pom.xml", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" 
xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\txsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n\t<modelVersion>4.0.0</modelVersion>\n\t\n\t<groupId>com.zzwtec</groupId>\n\t<artifactId>iceTicketProject</artifactId>\n\t<version>0.0.1-SNAPSHOT</version>\n\t<packaging>jar</packaging>\n\n\t<name>iceTicketProject</name>\n\t<url>http://maven.apache.org</url>\n\n\t<properties>\n\t\t<!-- ice version -->\n\t\t<ice.version>3.6.1</ice.version>\n\t\t<slice.home.dir>/Users/dengliaoyan/Documents/Work/Workspaces/Zeroc_Ice_Workspace/ice</slice.home.dir>\n\t\t<org.springframework.version>4.2.3.RELEASE</org.springframework.version>\n\t\t<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n\t</properties>\n\n\t<repositories>\n\t\t<repository>\n\t\t\t<id>zeroc</id>\n\t\t\t<name>zeroc</name>\n\t\t\t<url>http://repo.zeroc.com/nexus/content/repositories/releases</url>\n\t\t\t<releases>\n\t\t\t\t<enabled>true</enabled>\n\t\t\t</releases>\n\t\t\t<snapshots>\n\t\t\t\t<enabled>true</enabled>\n\t\t\t</snapshots>\n\t\t</repository>\n\t</repositories>\n\n\t<dependencies>\n\t\t<dependency>\n\t\t\t<groupId>junit</groupId>\n\t\t\t<artifactId>junit</artifactId>\n\t\t\t<version>3.8.1</version>\n\t\t\t<scope>test</scope>\n\t\t</dependency>\n\t\t<!-- ice -->\n\t\t<dependency>\n\t\t\t<groupId>com.zeroc</groupId>\n\t\t\t<artifactId>ice</artifactId>\n\t\t\t<version>${ice.version}</version>\n\t\t</dependency>\n\t\t<!-- icebox -->\n\t\t<dependency>\n\t\t\t<groupId>com.zeroc</groupId>\n\t\t\t<artifactId>icebox</artifactId>\n\t\t\t<version>${ice.version}</version>\n\t\t</dependency>\n\t\t<!-- icegrid -->\n\t\t<dependency>\n\t\t\t<groupId>com.zeroc</groupId>\n\t\t\t<artifactId>icegrid</artifactId>\n\t\t\t<version>${ice.version}</version>\n\t\t</dependency>\n\t\t<!-- iceglacier2 -->\n\t\t<dependency>\n\t\t\t<groupId>com.zeroc</groupId>\n\t\t\t<artifactId>glacier2</artifactId>\n\t\t\t<version>${ice.version}</version>\n\t\t</dependency>\n\t\t<!-- spring -->\n\t\t<dependency>\n\t\t\t<groupId>org.springframework</groupId>\n\t\t\t<artifactId>spring-context</artifactId>\n\t\t\t<version>${org.springframework.version}</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.springframework</groupId>\n\t\t\t<artifactId>spring-orm</artifactId>\n\t\t\t<version>${org.springframework.version}</version>\n\t\t</dependency>\n\t\t<!-- Hibernate核心包 -->\n\t\t<dependency>\n\t\t\t<groupId>mysql</groupId>\n\t\t\t<artifactId>mysql-connector-java</artifactId>\n\t\t\t<version>5.1.36</version>\n\t\t</dependency>\n\t\t<!-- 日志 -->\n\t\t<dependency>\n\t\t\t<groupId>ch.qos.logback</groupId>\n\t\t\t<artifactId>logback-classic</artifactId>\n\t\t\t<version>1.1.3</version>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>com.kenai.nbpwr</groupId>\n\t\t\t<artifactId>org-apache-commons-dbcp</artifactId>\n\t\t\t<version>1.2.2-201002241055</version>\n\t\t\t<type>nbm</type>\n\t\t</dependency>\n\t\t<dependency>\n\t\t\t<groupId>org.hibernate</groupId>\n\t\t\t<artifactId>hibernate-core</artifactId>\n\t\t\t<version>4.3.11.Final</version>\n\t\t</dependency>\n\t</dependencies>\n\n\t<build>\n\t\t<pluginManagement>\n\t\t\t<plugins>\n\t\t\t\t<plugin>\n\t\t\t\t\t<artifactId>maven-antrun-plugin</artifactId>\n\t\t\t\t\t<version>1.7</version>\n\n\t\t\t\t\t<executions>\n\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t<goal>run</goal>\n\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t<target 
name=\"slice2java\">\n\t\t\t\t\t\t\t\t\t<taskdef name=\"slice2java\" classname=\"Slice2JavaTask\"\n\t\t\t\t\t\t\t\t\t\tclasspathref=\"maven.plugin.classpath\" />\n\t\t\t\t\t\t\t\t\t<slice2java outputdir=\"generated\">\n\t\t\t\t\t\t\t\t\t\t<fileset dir=\"slice\" includes=\"*.ice\" />\n\t\t\t\t\t\t\t\t\t</slice2java>\n\t\t\t\t\t\t\t\t</target>\n\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t</execution>\n\t\t\t\t\t</executions>\n\t\t\t\t\t<dependencies>\n\t\t\t\t\t\t<!-- ant-ice -->\n\t\t\t\t\t\t<dependency>\n\t\t\t\t\t\t\t<groupId>com.zeroc</groupId>\n\t\t\t\t\t\t\t<artifactId>ant-ice</artifactId>\n\t\t\t\t\t\t\t<version>${ice.version}</version>\n\t\t\t\t\t\t\t<scope>system</scope>\n\t\t\t\t\t\t</dependency>\n\t\t\t\t\t</dependencies>\n\t\t\t\t</plugin>\n\t\t\t\t<plugin>\n\t\t\t\t\t<groupId>org.codehaus.mojo</groupId>\n\t\t\t\t\t<artifactId>build-helper-maven-plugin</artifactId>\n\t\t\t\t\t<version>1.7</version>\n\n\t\t\t\t\t<executions>\n\t\t\t\t\t\t<execution>\n\t\t\t\t\t\t\t<id>add-source</id>\n\t\t\t\t\t\t\t<phase>generate-sources</phase>\n\t\t\t\t\t\t\t<goals>\n\t\t\t\t\t\t\t\t<goal>add-source</goal>\n\t\t\t\t\t\t\t</goals>\n\t\t\t\t\t\t\t<configuration>\n\t\t\t\t\t\t\t\t<sources>\n\t\t\t\t\t\t\t\t\t<source>generated</source>\n\t\t\t\t\t\t\t\t</sources>\n\t\t\t\t\t\t\t</configuration>\n\t\t\t\t\t\t</execution>\n\t\t\t\t\t</executions>\n\t\t\t\t</plugin>\n\t\t\t\t<plugin>\n\t\t\t\t\t<artifactId>maven-compiler-plugin</artifactId>\n\t\t\t\t\t<version>3.1</version>\n\t\t\t\t\t<configuration>\n\t\t\t\t\t\t<source>1.7</source>\n\t\t\t\t\t\t<target>1.7</target>\n\t\t\t\t\t</configuration>\n\t\t\t\t</plugin>\n\t\t\t</plugins>\n\t\t</pluginManagement>\n\t</build>\n</project>\n" }, { "alpha_fraction": 0.6875311136245728, "alphanum_fraction": 0.7456417083740234, "avg_line_length": 38.35293960571289, "blob_id": "741e943b7c776313419022cbb55f97fbfcdff816", "content_id": "414bbcd1a995dcfa36f2c589398ad72d4196a960", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 6165, "license_type": "no_license", "max_line_length": 205, "num_lines": 153, "path": "/centos编译安装ice3.6.x.sh", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "#!/bin/bash\n#\n# 作者:邓燎燕\n# 2016-01-05\n# \n# 这个是在阿里云的Centos 6.5 64位的安装脚本\n#\n\nmkdir -p {downloads/ice,downloads/jdk}\nscp [email protected]:/root/downloads/jdk/jdk-7u80-linux-x64.tar.gz /root/downloads/jdk/\nscp [email protected]:/root/downloads/ice/bcprov-jdk15on-153.jar /root/downloads/ice/\nscp [email protected]:/root/downloads/ice/berkeley-db53-5.3.28.NC.brew.tar.gz /root/downloads/ice/\nscp [email protected]:/root/downloads/ice/bzip2-1.0.6.tar.gz /root/downloads/ice/\nscp [email protected]:/root/downloads/ice/expat-2.1.0.tar.gz /root/downloads/ice/\nscp [email protected]:/root/downloads/ice/icecertutils.zip /root/downloads/ice/\nscp [email protected]:/root/downloads/ice/libiconv-1.14.tar.gz /root/downloads/ice/\nscp [email protected]:/root/downloads/ice/mcpp-2.7.2.tar.gz /root/downloads/ice/\nscp [email protected]:/root/downloads/ice/openssl-1.0.1g.tar.gz /root/downloads/ice/\nscp [email protected]:/root/downloads/ice/ice-3.6.2.tar.gz /root/downloads/ice/\n\nyum -y install unzip zip bzip2 bzip2-devel tar gcc g++ gcc-c++ automake autoconf libtool make expat openssl openssl-devel libffi-devel libiconv python-devel mcpp\n\n#cd /root/downloads/ice\n#wget https://git.oschina.net/dengly/Ice-demo/raw/master/iceFiles/bcprov-jdk15on-153.rename\n#mv bcprov-jdk15on-153.rename bcprov-jdk15on-153.jar\n\n# 安装jdk\ncd 
/root/downloads/jdk\n#wget http://download.oracle.com/otn-pub/java/jdk/7u80-b15/jdk-7u80-linux-x64.tar.gz\ntar zxvf jdk-7u80-linux-x64.tar.gz\nmv jdk1.7.0_80 /usr/local/jdk7\ncp /root/downloads/ice/bcprov-jdk15on-153.jar /usr/local/jdk7/jre/lib/ext/\necho 'security.provider.x=org.bouncycastle.jce.provider.BouncyCastleProvider' >> /usr/local/jdk7/jre/lib/security/java.security\n## 添加环境变量\necho 'JAVA_HOME=/usr/local/jdk7' >> /etc/profile\necho 'JRE_HOME=$JAVA_HOME/jre' >> /etc/profile\necho 'CLASSPATH=.:$JRE_HOME/lib/jsse.jar:$JRE_HOME/lib/jfxrt.jar:$JAVA_HOME/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib/ext/bcprov-jdk15on-153.jar:$JRE_HOME/lib' >> /etc/profile\necho 'PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin' >> /etc/profile\necho 'export JAVA_HOME JRE_HOME CLASSPATH PATH' >> /etc/profile\nsource /etc/profile\n\n# 安装libiconv\n#cd /root/downloads/ice\n#tar zxvf libiconv-1.14.tar.gz\n#cd libiconv-1.14\n#./configure --prefix=/usr/local\n#make && make install\n\n\n# 安装expat 2.0\ncd /root/downloads/ice\n#wget http://nchc.dl.sourceforge.net/project/expat/expat/2.1.0/expat-2.1.0.tar.gz\n#wget https://raw.githubusercontent.com/dengly/Ice-demo/master/iceFiles/expat-2.1.0.tar.gz\n#wget https://git.oschina.net/dengly/Ice-demo/raw/master/iceFiles/expat-2.1.0.tar.gz\ntar zxvf expat-2.1.0.tar.gz\ncd expat-2.1.0\n./configure\nmake && make install\n## 添加环境变量\necho 'LD_LIBRARY_PATH=/usr/local/lib' >> /etc/profile\necho 'LD_RUN_PATH=/usr/local/lib' >> /etc/profile\n\n\n# 安装openssl\ncd /root/downloads/ice\n#wget http://www.openssl.org/source/openssl-1.0.1g.tar.gz\n#wget https://raw.githubusercontent.com/dengly/Ice-demo/master/iceFiles/openssl-1.0.1g.tar.gz\n#wget https://git.oschina.net/dengly/Ice-demo/raw/master/iceFiles/openssl-1.0.1g.tar.gz\ntar zxvf openssl-1.0.1g.tar.gz\ncd openssl-1.0.1g\n./config --prefix=/usr/local/openssl\nmake && make install\n\n\n# 安装bzip2\ncd /root/downloads/ice\n#wget http://www.bzip.org/1.0.6/bzip2-1.0.6.tar.gz\n#wget https://raw.githubusercontent.com/dengly/Ice-demo/master/iceFiles/bzip2-1.0.6.tar.gz\n#wget https://git.oschina.net/dengly/Ice-demo/raw/master/iceFiles/bzip2-1.0.6.tar.gz\ntar zxvf bzip2-1.0.6.tar.gz\ncd bzip2-1.0.6\nmake && make install\n\n\n# 安装mcpp\ncd /root/downloads/ice\n#wget http://jaist.dl.sourceforge.net/project/mcpp/mcpp/V.2.7.2/mcpp-2.7.2.tar.gz\n#wget https://raw.githubusercontent.com/dengly/Ice-demo/master/iceFiles/mcpp-2.7.2.tar.gz\n#wget https://git.oschina.net/dengly/Ice-demo/raw/master/iceFiles/mcpp-2.7.2.tar.gz\ntar zxvf mcpp-2.7.2.tar.gz\ncd mcpp-2.7.2\n./configure CFLAGS=-fPIC --enable-mcpplib --disable-shared \nmake && make install\n\n\n# 安装Berkeley DB\ncd /root/downloads/ice\n#wget http://download.oracle.com/otn/berkeley-db/db-5.3.28.tar.gz\n#wget https://raw.githubusercontent.com/dengly/Ice-demo/master/iceFiles/berkeley-db53-5.3.28.NC.brew.tar.gz\n#wget https://git.oschina.net/dengly/Ice-demo/raw/master/iceFiles/berkeley-db53-5.3.28.NC.brew.tar.gz\ntar zxvf berkeley-db53-5.3.28.NC.brew.tar.gz\ncd db-5.3.28.NC/build_unix/\n../dist/configure --prefix=/usr/local/berkeleydb --enable-cxx --enable-java\nmake && make install\ncp -rf /usr/local/berkeleydb/lib /usr/local/berkeleydb/lib64\n#echo '/usr/local/berkeleydb/lib/' >> /etc/ld.so.conf\n#ldconfig\n#cd ../..\n## 添加环境变量\necho 'LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/berkeleydb:/usr/lib64' >> /etc/profile\necho 'LD_RUN_PATH=$LD_RUN_PATH:/usr/local/berkeleydb/bin' >> /etc/profile\necho 'CLASSPATH=$CLASSPATH:/usr/local/berkeleydb/lib64/db.jar' >> /etc/profile\necho 
'export LD_LIBRARY_PATH LD_RUN_PATH CLASSPATH' >> /etc/profile\nsource /etc/profile\n\nldconfig\n\n# 安装ice\n#编译c++\ncd /root/downloads/ice\n#wget https://github.com/zeroc-ice/ice/archive/ice-3.6.2.tar.gz\n#wget https://raw.githubusercontent.com/dengly/Ice-demo/master/iceFiles/ice-3.6.2.tar.gz\n#wget https://git.oschina.net/dengly/Ice-demo/raw/master/iceFiles/ice-3.6.2.tar.gz\ntar zxvf ice-3.6.2.tar.gz\ncd ice-3.6.2/cpp\nmake\nmake install\n#编译java\ncd ../java\necho 'ICE_HOME=/usr/local/Ice-3.6.2' >> /etc/profile\necho 'PATH=$PATH:$ICE_HOME/bin' >> /etc/profile\necho 'export PATH ICE_HOME' >> /etc/profile\nsource /etc/profile\n./gradlew build\n./gradlew install\necho 'CLASSPATH=$CLASSPATH:/usr/local/Ice-3.6.2/share/java/' >> /etc/profile\necho 'export CLASSPATH' >> /etc/profile\nsource /etc/profile\n\n\n# 安装iceca\ncd /root/downloads/ice\n#wget https://bootstrap.pypa.io/ez_setup.py -O - | python\nwget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py\npython ez_setup.py --insecure\neasy_install pycrypto\neasy_install zeroc-icecertutils\n#将bcprov-jdk15on-153.jar复制到/usr/local/jdk7/jre/lib/ext下\n#配置环境变量ICE_CA_HOME\nmkdir -p /home/Ice/ca\necho 'ICE_CA_HOME=/home/Ice/ca' >> /etc/profile\necho 'export ICE_CA_HOME' >> /etc/profile\nsource /etc/profile\n\n\n" }, { "alpha_fraction": 0.6407185792922974, "alphanum_fraction": 0.6597713828086853, "avg_line_length": 50.73239517211914, "blob_id": "80e4ee8911cfcea729510c9fd838dda38c2dbf6a", "content_id": "3c56f36cd586b15742e1e9641187c256ea9a03c8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 3774, "license_type": "no_license", "max_line_length": 249, "num_lines": 71, "path": "/mymakecert.sh", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "#!/bin/bash\n#\n# 作者:邓燎燕\n# 2015-12-25\n# \n# 要配置好iceca的ICE_CA_HOME环境变量\n# 我的ca、server和client密码都是123456\n\necho \"------------ iceca init --------------\"\niceca init\n\necho \"------------ iceca create server and client --------------\"\niceca create --ip=8.8.8.8 --dns=www.baidu.com server\n\niceca create client\n\necho \"------------ iceca export cert --------------\"\niceca export --password 123456 --alias ca ./ca/ca.cer\niceca export --password 123456 --alias client ./ca/client.cer\niceca export --password 123456 --alias server ./ca/server.cer\n\necho \"------------ iceca export jks --------------\"\niceca export --password 123456 --alias ca ./ca/ca.jks\niceca export --password 123456 --alias client ./ca/client.jks\niceca export --password 123456 --alias server ./ca/server.jks\n\necho \"------------ iceca export bks --------------\"\niceca export --password 123456 --alias ca ./ca/ca.bks\niceca export --password 123456 --alias client ./ca/client.bks\niceca export --password 123456 --alias server ./ca/server.bks\n\necho \"------------ iceca export p12 --------------\"\niceca export --password 123456 --alias ca ./ca/ca.p12\niceca export --password 123456 --alias client ./ca/client.p12\niceca export --password 123456 --alias server ./ca/server.p12\n\necho \"------------ keytool -import --------------\"\nkeytool -import -v -trustcacerts -alias ca -file ./ca/ca.cer -storepass 123456 -keystore ./ca/server.jks\nkeytool -import -v -trustcacerts -alias ca -file ./ca/ca.cer -storepass 123456 -keystore ./ca/client.jks\n\nkeytool -import -v -trustcacerts -alias ca -file ./ca/ca.cer -storepass 123456 -keystore ./ca/server.bks -storetype bks -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath 
/usr/local/jdk7/jre/lib/ext/bcprov-jdk15on-153.jar\nkeytool -import -v -trustcacerts -alias ca -file ./ca/ca.cer -storepass 123456 -keystore ./ca/client.bks -storetype bks -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath /usr/local/jdk7/jre/lib/ext/bcprov-jdk15on-153.jar\n\necho \"--------------------------\"\nkeytool -list -keystore ./ca/ca.p12 -storetype pkcs12 -v -storepass 123456\necho \"--------------------------\"\nkeytool -list -keystore ./ca/ca.jks -storepass 123456 -v\necho \"--------------------------\"\nkeytool -list -keystore ./ca/ca.bks -storetype bks -storepass 123456 -v -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath /usr/local/jdk7/jre/lib/ext/bcprov-jdk15on-153.jar\necho \"--------------------------\"\nkeytool -list -keystore ./ca/server.p12 -storetype pkcs12 -v -storepass 123456\necho \"--------------------------\"\nkeytool -list -keystore ./ca/server.jks -storepass 123456 -v\necho \"--------------------------\"\nkeytool -list -keystore ./ca/server.bks -storetype bks -storepass 123456 -v -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath /usr/local/jdk7/jre/lib/ext/bcprov-jdk15on-153.jar\necho \"--------------------------\"\nkeytool -list -keystore ./ca/client.p12 -storetype pkcs12 -v -storepass 123456\necho \"--------------------------\"\nkeytool -list -keystore ./ca/client.jks -storepass 123456 -v\necho \"--------------------------\"\nkeytool -list -keystore ./ca/client.bks -storepass 123456 -v -storetype bks -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath /usr/local/jdk7/jre/lib/ext/bcprov-jdk15on-153.jar\n\necho \"\"\necho \"\"\necho \"--------------------------\"\necho \"配置说明\"\necho \"Glacier2的配置IceSSL.CAs=ca.pem,IceSSL.CertFile=server.p12\"\necho \"纯Java客户端使用client.jks\"\necho \"Android客户端使用client.bks\"\necho \"iOS客户端使用ca.cer和client.p12,IceSSL.CAs=ca.cer,IceSSL.CertFile=client.p12\"\necho \"\"\n\n" }, { "alpha_fraction": 0.740334689617157, "alphanum_fraction": 0.7836122512817383, "avg_line_length": 60.89285659790039, "blob_id": "0fa77ba0729a34c75e5d58dbe795d95ddbfc4048", "content_id": "0dfa0383bdd24feacbdf1d4c08a0692954cdba12", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1803, "license_type": "no_license", "max_line_length": 129, "num_lines": 28, "path": "/简易安装.sh", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "# Includes: Runtime and SDK for Ice for C++, Java, PHP, and all Ice services.\n\n# Red Hat Enterprise Linux 7\n# Prerequisite: You may need to enable the RHEL 7 Server Optional repository for several dependencies.\n#cd /etc/yum.repos.d\n#sudo wget https://zeroc.com/download/rpm/zeroc-ice-el7.repo\n#sudo yum -y install ice-all-runtime ice-all-devel\n\n# Red Hat Enterprise Linux 6\ncd /etc/yum.repos.d\nsudo wget https://zeroc.com/download/rpm/zeroc-ice-el6.repo\nsudo yum -y install ice-all-runtime ice-all-devel\n\n# 以上操作仅安装了ice所有的服务和指令 并没有库,如需java的jar包,这需要自行上传\n\n# jar包:\nmkdir -p /usr/local/Ice-3.6.2/share/java\ncd /usr/local/Ice-3.6.2/share/java\nwget https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/freeze/3.6.2/freeze-3.6.2.jar\nwget https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/glacier2/3.6.2/glacier2-3.6.2.jar\nwget https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/ice/3.6.2/ice-3.6.2.jar\nwget https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/icebox/3.6.2/icebox-3.6.2.jar\nwget 
https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/icediscovery/3.6.2/icediscovery-3.6.2.jar\nwget https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/icegrid/3.6.2/icegrid-3.6.2.jar\nwget https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/icegridgui/3.6.2/icegridgui-3.6.2.jar\nwget https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/icelocatordiscovery/3.6.2/icelocatordiscovery-3.6.2.jar\nwget https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/icepatch2/3.6.2/icepatch2-3.6.2.jar\nwget https://repo.zeroc.com/nexus/content/repositories/releases/com/zeroc/icestorm/3.6.2/icestorm-3.6.2.jar\n" }, { "alpha_fraction": 0.6425498127937317, "alphanum_fraction": 0.6860557794570923, "avg_line_length": 40.282894134521484, "blob_id": "7861d66397a3ffe64157abd15a310d5ac742d982", "content_id": "f28e46bae1fc56f2645ac43a92b3782db729030b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 14128, "license_type": "no_license", "max_line_length": 228, "num_lines": 304, "path": "/README.md", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "# Ice\n尊重作者研究,转载请注明出处。\n\n========\n\n创建时间 | 2015-12-28 | |\n--- | --- | ---\n创建人 | 邓燎燕\n版本号 | 1.0\n修改时间 | 修改人 | 修改内容\n2017-04-13 | 邓燎燕 | 添加[Centos安装Ice的PHP.md](./Centos安装Ice的PHP.md)\n2017-03-09 | 邓燎燕 | 添加keystore-explorer地址\n2016-01-18 | 邓燎燕 | 修复IceApp脚本bug,修改了Ice分布式服务配置说明,添加了IceGrid和IcePatch2的集成,也可以参考[http://blog.csdn.net/pj81102/article/details/5910545](http://blog.csdn.net/pj81102/article/details/5910545)\n2016-01-11 | 邓燎燕 | 添加bcprov-jdk15on-153,修改了安装脚本\n2016-01-11 | 邓燎燕 | 添加Ice分布式服务配置说明\n2016-01-06 | 邓燎燕 | 添加文件和安装脚本\n\n###### 说明\n* 本文的所有研究是基于[Ice 3.6.1](https://github.com/zeroc-ice/ice/tree/3.6.1)\n* iOS端的开发依赖包[icetouch](https://github.com/zeroc-ice/icetouch)\n* 在Ice 3.6.1里不再有iceca这个证书生成工具,已经迁移到[icecertutils](https://github.com/zeroc-ice/icecertutils),另发现一个比较好用的证书工具[keystore-explorer](https://github.com/kaikramer/keystore-explorer/tree/master/kse)\n* 服务器:CentOS 6.5 64位\n* JDK:[jdk1.7.0_79](http://download.oracle.com/otn/java/jdk/7u79-b15/jdk-7u79-linux-x64.tar.gz)\n* 个人的例子是参考[《ZeroC Ice权威指南》](http://item.jd.com/10026458225.html)和[ice-demo](https://github.com/zeroc-ice/ice-demos)\n* Ice 编译依赖 [iceFiles](./iceFiles)\n* Java 服务端 [iceTicketProject](./java/iceTicketProject)\n* Android 客户端 [android/IceClient](./android/IceClient)\n* JavaScript 客户端 [js/IceClient](./js/IceClient)\n* iOS 客户端 [objc/IceClient](./objc/IceClient)\n\n========\n\n# 编译安装\n## c++的编译安装请参考ice/cpp/BuildInstructionsLinux.md\n依赖的第三方库有expat 2.0、OpenSSL 0.9.8 or later、bzip 1.0、Berkeley DB 5.3、mcpp 2.7.2 (with patches),建议能收到编译安装的就收到编译安装,尽量不要使用yum这类工具,免得有奇葩的错误。这个我在CentOS和Mac OS X都安装了。\n\n## Java的编译安装请参考ice/java/BuildInstructions.md\n在CentOS上安装,我是没有成功过,但是在Mac OS X上成功了,所有我就直接将Mac OS X上编译好的jar包都上传到CentOS上了。\n\n## Objective-C的编译安装请参考ice/objective-c/BuildInstructions.md\n\n## JS库\nIce的js库可以到该连接下载[http://cdnjs.com/libraries/ice](#http://cdnjs.com/libraries/ice)\n\n========\n\n# 案例\n## IceGrid+IceBox+Spring\n我参考了[《ZeroC Ice权威指南》](http://item.jd.com/10026458225.html)中的第6章在线订票系统写了例子。\n## IceGrid+Glacier2使用SSL和WSS通讯\n我参考了[ice-demo](https://github.com/zeroc-ice/ice-demos)编写了客户端的代码。但是在使用SSL和WSS通讯时要生成相关证书,证书的生成请参考mymakecert.sh。\n\n## 结构图\n<img src=\"structure.png\" style=\"max-width:100%\"/>\n\n## 
请求说明\n客户端(Android和iOS)通过SSL和WSS向Glacier2发送请求,Glacier2在通过TCP向IceGrid获取相关结果后返回给客户端。\n\n到目前为止,经测试纯Java客户端、Android、iOS使用加密或非加密都成功,而提供的网页前端使用WS成功,使用WSS没能成功,希望有人能完善或提供网页前端使用WSS的说明。\n\n## 部署\n* 路径/home/Ice\n* 创建Ice_gridregistry、Ice_gridnode、registry、node、node/data、Ice_glacier2、grid、logs、Ice_glacier2/sslstore\n* 将生成的证书放在Ice_glacier2/sslstore下\n* 将IceGrid配置写入Ice_gridregistry/icegridregistry.cfg\n* 将IceNode配置写入Ice_gridnode/icegridnode1.cfg\n* 将Glacier2配置写入Ice_glacier2/iceglacier2.cfg\n* 将iceTicketProject应用的服务端放在/home/Ice下\n* 运行IceApp.sh脚本\n* 在运行IceAdmin.sh加载应用的ticketgrid.xml配置文件,启动服务\n\n========\n\n# 附件\n## mymakecert.sh\n\t#!/bin/bash\n\t#\n\t# 作者:邓燎燕\n\t# 2015-12-25\n\t# \n\t# 要配置好iceca的ICE_CA_HOME环境变量\n\t# 我的ca、server和client密码都是123456\n\n\techo \"------------ iceca init --------------\"\n\ticeca init\n\n\techo \"------------ iceca create server and client --------------\"\n\ticeca create --ip=192.168.0.112 --dns=192.168.0.112 server\n\n\ticeca create client\n\n\techo \"------------ iceca export cert --------------\"\n\ticeca export --password 123456 --alias ca ./ca/ca.cer\n\ticeca export --password 123456 --alias client ./ca/client.cer\n\ticeca export --password 123456 --alias server ./ca/server.cer\n\n\techo \"------------ iceca export jks --------------\"\n\ticeca export --password 123456 --alias ca ./ca/ca.jks\n\ticeca export --password 123456 --alias client ./ca/client.jks\n\ticeca export --password 123456 --alias server ./ca/server.jks\n\n\techo \"------------ iceca export bks --------------\"\n\ticeca export --password 123456 --alias ca ./ca/ca.bks\n\ticeca export --password 123456 --alias client ./ca/client.bks\n\ticeca export --password 123456 --alias server ./ca/server.bks\n\n\techo \"------------ iceca export p12 --------------\"\n\ticeca export --password 123456 --alias ca ./ca/ca.p12\n\ticeca export --password 123456 --alias client ./ca/client.p12\n\ticeca export --password 123456 --alias server ./ca/server.p12\n\n\techo \"------------ keytool -import --------------\"\n\tkeytool -import -v -trustcacerts -alias ca -file ./ca/ca.cer -storepass 123456 -keystore ./ca/server.jks\n\tkeytool -import -v -trustcacerts -alias ca -file ./ca/ca.cer -storepass 123456 -keystore ./ca/client.jks\n\n\tkeytool -import -v -trustcacerts -alias ca -file ./ca/ca.cer -storepass 123456 -keystore ./ca/server.bks -storetype bks -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath /path/bcprov-jdk15on-153.jar\n\tkeytool -import -v -trustcacerts -alias ca -file ./ca/ca.cer -storepass 123456 -keystore ./ca/client.bks -storetype bks -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath /path/bcprov-jdk15on-153.jar\n\n\techo \"--------------------------\"\n\tkeytool -list -keystore ./ca/ca.p12 -storetype pkcs12 -v -storepass 123456\n\techo \"--------------------------\"\n\tkeytool -list -keystore ./ca/ca.jks -storepass 123456 -v\n\techo \"--------------------------\"\n\tkeytool -list -keystore ./ca/ca.bks -storetype bks -storepass 123456 -v -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath /path/bcprov-jdk15on-153.jar\n\techo \"--------------------------\"\n\tkeytool -list -keystore ./ca/server.p12 -storetype pkcs12 -v -storepass 123456\n\techo \"--------------------------\"\n\tkeytool -list -keystore ./ca/server.jks -storepass 123456 -v\n\techo \"--------------------------\"\n\tkeytool -list -keystore ./ca/server.bks -storetype bks -storepass 123456 -v -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath 
/path/bcprov-jdk15on-153.jar\n\techo \"--------------------------\"\n\tkeytool -list -keystore ./ca/client.p12 -storetype pkcs12 -v -storepass 123456\n\techo \"--------------------------\"\n\tkeytool -list -keystore ./ca/client.jks -storepass 123456 -v\n\techo \"--------------------------\"\n\tkeytool -list -keystore ./ca/client.bks -storepass 123456 -v -storetype bks -provider org.bouncycastle.jce.provider.BouncyCastleProvider -providerpath /path/bcprov-jdk15on-153.jar\n\n\techo \"\"\n\techo \"\"\n\techo \"--------------------------\"\n\techo \"配置说明\"\n\techo \"Glacier2的配置IceSSL.CAs=ca.pem,IceSSL.CertFile=server.p12\"\n\techo \"纯Java客户端使用client.jks\"\n\techo \"Android客户端使用client.bks\"\n\techo \"iOS客户端使用ca.cer和client.p12,配置IceSSL.CAs=ca.cer,IceSSL.CertFile=client.p12\"\n\techo \"\"\n\n## Glacier2配置\n\t#\n\t# Set the instance name\n\t#\n\tGlacier2.InstanceName=DemoGlacier2\n\n\t#\n\t# The client-visible endpoint of Glacier2. This should be an endpoint\n\t# visible from the public Internet, and it should be secure.\n\t#\n\tGlacier2.Client.Endpoints=wss -p 4064:ssl -p 4063\n\n\t# IceGrid+Glacier2的关键\n\tIce.Default.Locator=IceGrid/Locator:tcp -h localhost -p 4061\t\n\tGlacier2.Server.Endpoints=tcp -h localhost\n\tGlacier2.PermissionsVerifier=DemoGlacier2/NullPermissionsVerifier\n\tGlacier2.SessionTimeout=30\n\tGlacier2.Client.ForwardContext=1\n\tGlacier2.Server.ForwardContext=1\n\tGlacier2.Client.SleepTime=500\n\tGlacier2.Server.SleepTime=500\n\tGlacier2.Client.Trace.Request=1\n\tGlacier2.Server.Trace.Request=1\n\tGlacier2.Client.Trace.Override=1\n\tGlacier2.Server.Trace.Override=1\n\tGlacier2.Client.Trace.Reject=1\n\tGlacier2.Trace.Session=1\n\tGlacier2.Trace.RoutingTable=1\n\tIce.Warn.Connections=1\n\tIce.Trace.Network=1\n\tIce.Trace.Protocol=1\n\tIceSSL.Trace.Security=1\n\n\t#\n\t# SSL Configuration\n\t#\n\tIce.Plugin.IceSSL=IceSSL:createIceSSL\n\tIceSSL.Protocols=SSL3, TLS1_0, TLS1_1, TLS1_2\n\tIceSSL.DefaultDir=/home/Ice/Ice_glacier2/sslstore\n\tIceSSL.CAs=ca.pem\n\tIceSSL.CertFile=server.p12\n\tIceSSL.Password=123456\n\tIceSSL.Keychain=glacier2.keychain\n\tIceSSL.KeychainPassword=123456\n\n## IceGrid配置\n\tIceGrid.Registry.Client.Endpoints=tcp -p 4061:ws -p 4062\n\tIceGrid.Registry.Server.Endpoints=tcp\n\tIceGrid.Registry.Internal.Endpoints=tcp\n\tIceGrid.Registry.AdminPermissionsVerifier=IceGrid/NullPermissionsVerifier\n\tIceGrid.Registry.Data=./registry\n\tIceGrid.Registry.DynamicRegistration=1\n\tIce.Admin.InstanceName=AdminInstance\n\tIce.Admin.ServerId=Admin\n\n## IceNode配置\n\t#ice node config for ticketnode1\n\t#指定主注册节点的位置\n\tIce.Default.Locator=IceGrid/Locator:tcp -h 127.0.0.1 -p 4061:ws -h 127.0.0.1 -p 4062\n\t#设置节点1相关数据的存储目录\n\tIceGrid.Node.Data=/home/Ice/node/data\n\t#指定节点1用于监听客户端连接的端口\n\tIceGrid.Node.Endpoints=tcp -p 5062\n\t#指定节点1的名称\n\tIceGrid.Node.Name=node1\n\t#指定错误日志文件\n\tIce.StdErr=/home/Ice/node/node.stderr.log\n\tIce.StdOut=/home/Ice/node/node.stdout.log\n\n## 应用的服务端配置\n\t<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\t<icegrid>\n <application name=\"MyTicketBookSystem\">\n <properties id=\"MultiThreaded\">\n <property name=\"Ice.PrintStackTraces\" value=\"1\" />\n <property name=\"Ice.ThreadPool.Client.Size\" value=\"2\" />\n <property name=\"Ice.ThreadPool.Client.SizeMax\" value=\"50\" />\n <property name=\"Ice.ThreadPool.Server.Size\" value=\"10\" />\n <property name=\"Ice.ThreadPool.Server.SizeMax\" value=\"100\" />\n <property name=\"IceBox.InheritProperties\" value=\"1\" />\n <property name=\"Ice.Override.ConnectTimeout\" value=\"5000\" />\n <property 
name=\"Ice.Override.Timeout\" value=\"10000\" />\n <property name=\"IceBox.Trace.ServiceObserver\" value=\"1\" />\n <property name=\"Ice.Default.LocatorCacheTimeout\" value=\"300\" />\n <property name=\"Ice.BackgroundLocatorCacheUpdates\" value=\"1\" />\n\n <property name=\"Ice.Trace.Retry\" value=\"2\" />\n <property name=\"Ice.Trace.Network\" value=\"1\" />\n <property name=\"Ice.Trace.ThreadPool\" value=\"1\" />\n <property name=\"Ice.Trace.Locator\" value=\"1\" />\n <property name=\"Ice.StdErr\" value=\"/home/Ice/grid/stderr.log\" />\n <property name=\"Ice.StdOut\" value=\"/home/Ice/grid/stdout.log\" />\n </properties>\n <server-template id=\"TicketOrderServerTemplate\"><!-- 定义服务器模板 -->\n <parameter name=\"id\" />\n <icebox id=\"TicketOrderServer${id}\" exe=\"java\" activation=\"on-demand\">\n <properties>\n <properties refid=\"MultiThreaded\" />\n </properties>\n <option>-Xmx512M</option>\n <option>-DAppHome=/home/Ice</option>\n <option>-DAppId=TicketOrderServer${id}</option>\n <!-- 指定启动类,使用Sl4jIceBoxServer替代IceBox.Server作为IceBox的启动类 -->\n <option>com.zzwtec.iceTicketProject.ice.Sl4jIceBoxServer</option>\n <env>CLASSPATH=.:/opt/Ice-3.6.1/lib/*:/home/libs/*:/home/Ice/iceTicketProject</env>\n <service name=\"TicketService\" entry=\"com.zzwtec.iceTicketProject.ice.service.MyTicketService\">\n <adapter name=\"TicketService\" id=\"TicketService${id}\" endpoints=\"tcp:ws\" replica-group=\"TicketServiceRep\"></adapter>\n </service>\n </icebox>\n </server-template>\n <replica-group id=\"TicketServiceRep\"><!-- 定义适配器复制组 -->\n <load-balancing type=\"round-robin\" n-replicas=\"0\" />\n <object identity=\"TicketService\" type=\"::ticket::TicketService\" /><!-- identity将在客户 -->\n </replica-group>\n <node name=\"node1\">\n <server-instance template=\"TicketOrderServerTemplate\" id=\"1\" />\n <server-instance template=\"TicketOrderServerTemplate\" id=\"2\" />\n </node>\n </application>\n\t</icegrid>\n\n## 纯Java客户端配置\n\tIce.Default.Router=DemoGlacier2/router:ssl -p 4063 -h 192.168.0.112 -t 10000:wss -p 4064 -h 192.168.0.112 -t 10000\n\tIce.RetryIntervals=-1\n\tIce.Trace.Network=0\n\tIce.Plugin.IceSSL=IceSSL.PluginFactory\n\tIceSSL.DefaultDir=/certs/path\n\tIceSSL.VerifyPeer=0\n\tIceSSL.Trace.Security=1\n\tIceSSL.KeystoreType=JKS\n\tIceSSL.Keystore=client.jks\n\tIceSSL.Password=123456\n\n## Android客户端配置\n\tIce.Default.Router=DemoGlacier2/router:ssl -p 4063 -h 192.168.0.112 -t 10000:wss -p 4064 -h 192.168.0.112 -t 10000\n\tIce.RetryIntervals=-1\n\tIce.Trace.Network=0\n\tIce.Plugin.IceSSL=IceSSL.PluginFactory\n\tIce.InitPlugins=0\n\tIceSSL.VerifyPeer=0\n\tIceSSL.Trace.Security=1\n\tIceSSL.KeystoreType=BKS\n\tIceSSL.Password=123456\n\tIceSSL.UsePlatformCAs=0\n\n## iOS客户端配置\n\tIce.Default.Router=DemoGlacier2/router:ssl -p 4063 -h 192.168.0.112 -t 10000:wss -p 4064 -h 192.168.0.112 -t 10000\n\tIce.Trace.Locator=1\n\tIce.ACM.Client.Timeout=0\n\tIce.RetryIntervals=-1\n\tIce.Plugin.IceSSL=IceSSL:createIceSSL\n\tIceSSL.DefaultDir=./\n\tIceSSL.CAs=ca.cer\n\tIceSSL.CertFile=client.p12\n\tIceSSL.Password=123456\n\t#IceSSL.Keychain=client.keychain\n\t#IceSSL.KeychainPassword=123456\n" }, { "alpha_fraction": 0.6456762552261353, "alphanum_fraction": 0.6576496958732605, "avg_line_length": 42.36538314819336, "blob_id": "142c547ca178d30482f0b3f9d27ab5448988ec3c", "content_id": "4d857ad892806765f03336a440922dc9ecd461d1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2255, "license_type": "no_license", "max_line_length": 111, "num_lines": 52, "path": 
"/android/IceClient/src/com/zzwtec/iceTicketProject/util/ice/glacier2/IceGlacier2Util.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "package com.zzwtec.iceTicketProject.util.ice.glacier2;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Properties;\n\nimport android.content.Context;\nimport android.os.Build;\n\npublic class IceGlacier2Util {\n\tprivate static Glacier2.SessionFactoryHelper sessionFactoryHelper;\n\tpublic static Glacier2.SessionHelper sessionHelper;\n\t\n\tpublic static void connect(Context context,Glacier2.SessionCallback callback){\n\t\tIce.InitializationData initData = new Ice.InitializationData();\n\t\tinitData.properties = Ice.Util.createProperties();\n\t\t\n\t\ttry {\n InputStream inputStream = context.getResources().getAssets().open(\"iceclient_glacier2.properties\");\n Properties properties = new Properties();\n properties.load(inputStream);\n for (String name : properties.stringPropertyNames()) {\n \tif(name.equals(\"idleTimeOutSeconds\")){\n \t\tcontinue;\n \t}\n String value = properties.getProperty(name);\n initData.properties.setProperty(name, value);\n }\n// initData.properties.setProperty(\"Ice.RetryIntervals\", \"-1\");\n// initData.properties.setProperty(\"Ice.Trace.Network\", \"0\");\n// initData.properties.setProperty(\"Ice.Plugin.IceSSL\", \"IceSSL.PluginFactory\");\n\n // SDK versions < 21 only support TLSv1 with SSLEngine.\n if(Build.VERSION.SDK_INT < 21) {\n initData.properties.setProperty(\"IceSSL.Protocols\", \"TLS1_0\");\n }\n\n// if(initData.properties.getPropertyAsIntWithDefault(\"IceSSL.UsePlatformCAs\", 0) == 0) {\n// \tinitData.properties.setProperty(\"Ice.InitPlugins\", \"0\");\n// \tinitData.properties.setProperty(\"IceSSL.VerifyPeer\", \"0\");\n// initData.properties.setProperty(\"IceSSL.Trace.Security\", \"1\");\n// initData.properties.setProperty(\"IceSSL.KeystoreType\", \"BKS\");\n// initData.properties.setProperty(\"IceSSL.Password\", \"123456\");\n// }\n } catch(IOException ex) {\n ex.printStackTrace();\n }\n\t\t\n\t\tsessionFactoryHelper = new Glacier2.SessionFactoryHelper(initData, callback);\n\t\tsessionHelper = sessionFactoryHelper.connect(\"dly\", \"123456\");\n\t}\n}\n" }, { "alpha_fraction": 0.7665505409240723, "alphanum_fraction": 0.7857142686843872, "avg_line_length": 30.88888931274414, "blob_id": "da074919f61243008d776d6273bb554e79b8bc64", "content_id": "3bd395abf9200332c989d6df6b3dc3e7ae1c88c3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 574, "license_type": "no_license", "max_line_length": 108, "num_lines": 18, "path": "/java/iceTicketProject/src/test/java/com/zzwtec/iceTicketProject/ice/TestIce.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "package com.zzwtec.iceTicketProject.ice;\n\nimport com.zzwtec.iceTicketProject.util.IceClientUtil;\nimport com.zzwtec.ticket.ticket.Order;\nimport com.zzwtec.ticket.ticket.TicketServicePrx;\n\npublic class TestIce {\n\tpublic static void main(String[] args) {\n\t\t// TODO Auto-generated method stub\n\t\ttestCall();\n\t}\n\t\n\tpublic static void testCall(){\n\t\tTicketServicePrx ticketServicePrx = (TicketServicePrx)IceClientUtil.getServicePrx(TicketServicePrx.class);\n\t\tOrder[] orders = ticketServicePrx.queryMyOrders(\"13631276694\");\n\t\tSystem.out.println(\"orders.length:\"+orders.length);\n\t}\n}\n" }, { "alpha_fraction": 0.5789576768875122, "alphanum_fraction": 0.5860109925270081, "avg_line_length": 29.74698829650879, "blob_id": 
"b75b665d3d7525d9e1e7a5ce4082696701753a84", "content_id": "1d42c31dc366a0725819829334524adea3ba2881", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5104, "license_type": "no_license", "max_line_length": 113, "num_lines": 166, "path": "/makecerts-sk163.py", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# **********************************************************************\n#\n# Copyright (c) 2003-2015 ZeroC, Inc. All rights reserved.\n# \n# modify by sk163\n#\n# **********************************************************************\n\nimport os, sys, socket, getopt,getpass\n\ntry:\n import IceCertUtils\nexcept:\n print(\"error: couldn't find IceCertUtils, install `zeroc-icecertutils' package \"\n \"from Python package repository\")\n sys.exit(1)\n \ndef question(message, expected = None):\n sys.stdout.write(message)\n sys.stdout.write(' ')\n sys.stdout.flush()\n choice = sys.stdin.readline().strip()\n if expected:\n return choice in expected\n else:\n return choice\n \n \ndef usage():\n print(\"Usage: \" + sys.argv[0] + \" [options]\")\n print(\"\")\n print(\"Options:\")\n print(\"-h Show this message.\")\n print(\"-d | --debug Debugging output.\")\n print(\"--ip <ip> The IP address for the server certificate.\")\n print(\"--dns <dns> The DNS name for the server certificate.\")\n print(\"--use-dns Use the DNS name for the server certificate common\")\n print(\" name (default is to use the IP address).\" )\n sys.exit(1)\n\n#\n# Check arguments\n#\ndebug = False\nip = None\ndns = None\nusedns = False\nimpl = \"\"\ncreateCA=False\ntry:\n opts, args = getopt.getopt(sys.argv[1:], \"hd\", [\"help\", \"debug\", \"ip=\", \"dns=\",\"use-dns\",\"impl=\"])\nexcept getopt.GetoptError as e:\n print(\"Error %s \" % e)\n usage()\n sys.exit(1)\n\nfor (o, a) in opts:\n if o == \"-h\" or o == \"--help\":\n usage()\n sys.exit(0)\n elif o == \"-d\" or o == \"--debug\":\n debug = True\n elif o == \"--ip\":\n ip = a\n elif o == \"--dns\":\n dns = a\n elif o == \"--use-dns\":\n usedns = True\n elif o == \"--impl\":\n impl = a\n\ndef request(question, newvalue, value):\n while True:\n sys.stdout.write(question)\n sys.stdout.flush()\n input = sys.stdin.readline().strip()\n if input == 'n':\n sys.stdout.write(newvalue)\n sys.stdout.flush()\n return sys.stdin.readline().strip()\n else:\n return value\n\nhome = os.getcwd()\nprint home\n\nif question(\"create CA? (y/n) [n]\", ['y', 'Y']):\n\tif not ip:\n\t try:\n\t #ip = socket.gethostbyname(socket.gethostname())\n\t ip = \"127.0.0.1\"\n\t except:\n\t ip = \"127.0.0.1\"\n\t ip = request(\"The IP address used for the server certificate will be: \" + ip + \"\\n\"\n\t \"Do you want to keep this IP address? (y/n) [y]\", \"IP : \", ip)\n\t\n\tif not dns:\n\t dns = \"localhost\"\n\t dns = request(\"The DNS name used for the server certificate will be: \" + dns + \"\\n\"\n\t \"Do you want to keep this DNS name? 
(y/n) [y]\", \"DNS : \", dns)\n\t\n\tCertificateFactory = vars(IceCertUtils)[impl + \"CertificateFactory\"]\n\t# Construct the DN for the CA certificate.\n\tDNelements = {\n\t 'C': \"Country name\",\n\t 'ST':\"State or province name\",\n\t 'L': \"Locality\",\n\t 'O': \"Organization name\",\n\t 'OU':\"Organizational unit name\",\n\t 'CN':\"Common name\",\n\t 'emailAddress': \"Email address\"\n\t}\n\t\n\tdn = IceCertUtils.DistinguishedName(\"Ice CertUtils CA\")\n\twhile True:\n\t print(\"\")\n\t print(\"The subject name for your CA will be \" + str(dn))\n\t print(\"\")\n\t if question(\"Do you want to keep this as the CA subject name? (y/n) [y]\", ['n', 'N']):\n\t for k,v in DNelements.items():\n\t v = question(v + \": \")\n\t if k == 'C' and len(v) > 2:\n\t print(\"The contry code can't be longer than 2 characters\")\n\t continue\n\t setattr(dn, k, v)\n\t\n\t else:\n\t break\n\t\n\t#factory = CertificateFactory(debug=debug, cn=\"Ice Demos CA\")\n\tcapass = getpass.getpass(\"Enter the CA passphrase:\")\n\thome = os.path.normpath(home)\n\tfactory =lambda: IceCertUtils.CertificateFactory(home=home, debug=debug, dn=dn, password=capass)\n\t\n\nelse:\n\t\n\t#ca_path=question(\"ca.pem follder path:\");\n\tif not os.path.exists(home+\"/ca.pem\"):\n\t\t\tprint(\"ca.pem not found\");\n\t\t\tsys.exit(1)\n\tcapass = getpass.getpass(\"Enter the CA passphrase:\")\n\tfactory =lambda: IceCertUtils.CertificateFactory(home=home, debug=debug, password=capass)\n\t\n# Client certificate\nif question(\"create Client Cert? (y/n) [n]\", ['y', 'Y']):\n\tclient_alias=question(\"client_alias:\");\n\tclinetpass = getpass.getpass(\"Enter the Client pass passphrase:\")\n\tclient = factory().create(client_alias)\n\tclient.save(client_alias+\".p12\",password=clinetpass).save(client_alias+\".jks\", caalias=\"ca\",password=clinetpass)\n\n# Server certificate\nif question(\"create Server Cert? (y/n) [n]\", ['y', 'Y']):\n\tserver_alias=question(\"server_alias:\");\n\tserverpass = getpass.getpass(\"Enter the Server pass passphrase:\")\n\tserver = factory().create(\"server\", cn = (dns if usedns else ip), ip=ip, dns=dns)\n\tserver.save(\"server.p12\",password=serverpass).save(\"server.jks\", caalias=\"ca\",password=serverpass)\n\n#try:\n# client.save(\"client.bks\", caalias=\"cacert\")\n# server.save(\"server.bks\", caalias=\"cacert\")\n#except Exception as ex:\n# print(\"warning: couldn't generate BKS certificates:\\n\" + str(ex))\n\n#factory.destroy()\n" }, { "alpha_fraction": 0.5409219264984131, "alphanum_fraction": 0.5512700080871582, "avg_line_length": 22.622222900390625, "blob_id": "0a02acfd8474f6f2400e32d2166e19719f257ce7", "content_id": "712eb32af895c28a0126ea8c66a06e1de2c5d366", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1095, "license_type": "no_license", "max_line_length": 73, "num_lines": 45, "path": "/java/iceTicketProject/generated/com/zzwtec/ticket/ticket/_TicketServiceOperations.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "// **********************************************************************\n//\n// Copyright (c) 2003-2015 ZeroC, Inc. 
All rights reserved.\n//\n// This copy of Ice is licensed to you under the terms described in the\n// ICE_LICENSE file included in this distribution.\n//\n// **********************************************************************\n//\n// Ice version 3.6.1\n//\n// <auto-generated>\n//\n// Generated from file `TicketServer.ice'\n//\n// Warning: do not edit this file.\n//\n// </auto-generated>\n//\n\npackage com.zzwtec.ticket.ticket;\n\n/**\n * 票务服务接口\n **/\npublic interface _TicketServiceOperations\n{\n /**\n * 下单\n * @param __current The Current object for the invocation.\n **/\n boolean createOrder(Order myOrder, Ice.Current __current);\n\n /**\n * 查询订单\n * @param __current The Current object for the invocation.\n **/\n Order[] queryMyOrders(String phone, Ice.Current __current);\n\n /**\n * 取消订单\n * @param __current The Current object for the invocation.\n **/\n boolean cancleOrder(long orderId, Ice.Current __current);\n}\n" }, { "alpha_fraction": 0.6962962746620178, "alphanum_fraction": 0.800000011920929, "avg_line_length": 26.200000762939453, "blob_id": "7854c0f14bbc5ab4fe47be5ed40e0a1669651963", "content_id": "98931a4d0e1231915e26ee34cf51259053f83d7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 135, "license_type": "no_license", "max_line_length": 61, "num_lines": 5, "path": "/java/iceTicketProject/src/main/resources/ticketsystem.properties", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "jdbc_url=jdbc:mysql://127.0.0.1:3306/ticketdb?useUnicode=true\njdbc_user=root\njdbc_password=123456\njdbc_maxActive=100\njdbc_minIdle=5" }, { "alpha_fraction": 0.7452107071876526, "alphanum_fraction": 0.7452107071876526, "avg_line_length": 22.727272033691406, "blob_id": "8d29428fbe41ac5ac734e3bdf3708ccec56bd897", "content_id": "8c920fa42b440e4029424c3bd0cf775d8900722a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 522, "license_type": "no_license", "max_line_length": 74, "num_lines": 22, "path": "/java/iceTicketProject/src/main/java/com/zzwtec/iceTicketProject/util/SpringUtil.java", "repo_name": "DLTech21/Ice-demo", "src_encoding": "UTF-8", "text": "package com.zzwtec.iceTicketProject.util;\n\nimport org.springframework.context.support.ClassPathXmlApplicationContext;\n\npublic class SpringUtil {\n\tprivate static ClassPathXmlApplicationContext ctx;\n\t\n\tpublic static synchronized <T>T getBean(Class<T> beanCls){\n\t\tif(ctx == null){\n\t\t\tctx = new ClassPathXmlApplicationContext(\"applicationContext.xml\");\n\t\t\tctx.registerShutdownHook();\n\t\t}\n\t\treturn ctx.getBean(beanCls);\n\t}\n\t\n\tpublic static synchronized void shutdown(){\n\t\tif(ctx!=null){\n\t\t\tctx.close();\n\t\t\tctx = null;\n\t\t}\n\t}\n}\n" } ]
26
kamushekp/Optimal-Experiments
https://github.com/kamushekp/Optimal-Experiments
f7604b03dfd8068ec1c30baa8037491a5210e7c7
1ab4d41b338da5dae96a8dfb0d99cb41a06039e1
6c62e00e4292b237c5ad6477a8de34e05fee6d7d
refs/heads/master
2017-10-07T14:20:52.516569
2017-04-02T17:33:29
2017-04-02T17:33:29
81,194,090
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.49706655740737915, "alphanum_fraction": 0.5112280249595642, "avg_line_length": 26.73595428466797, "blob_id": "b0c259d7bdf8b33bca258ae6d58fdaccfdb8be23", "content_id": "72ebeedda79d13c1d7946a5bf2df5a83acb54918", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5175, "license_type": "no_license", "max_line_length": 117, "num_lines": 178, "path": "/laba4_sinthesis.py", "repo_name": "kamushekp/Optimal-Experiments", "src_encoding": "UTF-8", "text": "import numpy as np\nfrom scipy.optimize import minimize as minim\n\n\nreal = [1., 0.5, 0.5, 0.5]\n\n\n \n \ndef main(N_grid, N_plan, estims):\n m = 4\n\n\n def fx(point):\n x = point[0]\n y = point[1]\n z = point[2]\n temp = mnk[0]*x**mnk[1]*y**mnk[2]*z**mnk[3]\n \n return np.matrix([temp/mnk[0], temp*np.log(x),\n temp*np.log(y), temp*np.log(z)])\n \n def M(design):\n res = np.zeros((m, m))\n \n for elem in design:\n _fx = fx(elem)\n res += np.dot(_fx.T,_fx)\n \n return res\n \n \n def delta(x_j, x): #from p.2\n D = np.linalg.inv(M(design)) \n \n \n def get_dxe():\n fx_ = fx(x) \n dxe = np.dot(fx_, D)\n return np.dot(dxe, fx_.T)\n \n \n def get_dxje(): \n fx_j = fx(x_j) \n dx_je = np.dot(fx_j, D)\n return np.dot(dx_je, fx_j.T)\n \n \n dxe = get_dxe()\n dxje = get_dxje()\n dxxje = np.dot(fx(x), D)\n dxxje = np.dot(dxxje, fx(x_j).T)\n \n res = (dxe-dxje)/N_plan - (dxe*dxje - dxxje**2)/N_plan**2\n \n return float(res)\n \n \n grid = np.linspace(1, 2, N_grid)\n\n \n from itertools import product\n points = [elem for elem in product(grid, grid, grid)]\n \n from random import randint\n \n init = [randint(0, len(points) - 1) for i in range(N_plan)]\n design = np.array(points)[init]\n design = [[*elem] for elem in design]\n\n def find_replacement(need_replacement, index_of_replacement, plan):\n for point in points:\n if not any(point[0] == elem[0] and point[1] == elem[1] for elem in plan):\n\n _delta = delta(need_replacement, point)\n if _delta > 1e-2:\n last_det = np.linalg.det(M(plan))\n \n last_point = plan.pop(index_of_replacement)\n plan.insert(index_of_replacement, [*point])\n new_det = np.linalg.det(M(plan))\n \n if new_det<last_det:\n plan.pop(index_of_replacement)\n plan.insert(index_of_replacement, [*last_point])\n return False\n #print ('Вставил точку {0} вместо точки {1}\\n'.format(point, need_replacement))\n return True\n return False\n \n def PrettyPoint(Point):\n return \"(%0.2f, %0.2f %0.2f)\" %(Point[0], Point[1], Point[2])\n \n print('Определитель информационной матрицы стартового плана эксперимента: {}\\n'.format(np.linalg.det(M(design))))\n for elem in design:\n print(PrettyPoint(elem)) \n flag = True\n while flag:\n flag = False\n for index, point in enumerate(design): \n #print('Ищу замену для {}\\n'.format(point))\n flag = max(find_replacement(point, index, design), flag)\n det = np.linalg.det(M(design))\n \n print('Определитель последней информационной матрицы: {0}\\n'.format(det))\n result = []\n print('Оптимальный план:\\n')\n for elem in design:\n current = PrettyPoint(elem)\n print(current)\n result.append(elem)\n return result\n \n \nimport itertools as it\nimport random as rd \nfrom laba4_MNK import*\nN_grid = 9\nN_plan = 25\n\ngrid = np.linspace(1, 2, N_grid)\npoints = [elem for elem in it.product(grid, grid, grid)]\ninit = [rd.randint(0, len(points) - 1) for i in range(N_plan)]\ninit = np.array(points)[init]\ny, noisy = make_data(real, init)\n\nestims = get_linear_MNK(noisy, init)\nestims[0] = np.exp(estims[0])\nmnk = [float(estim) for estim in 
estims]\nprint(\"МНК оценки для локально-оптимального построения есть {}\\n\\n\".format(mnk))\n\noptimal = np.array(main(N_grid, N_plan, estims))\nprint('\\n\\n')\n\n\ndef get_true_MNK(noisy, design):\n\n def f_x(point):\n x = point[0]\n y = point[1]\n z = point[2]\n temp = mnk[0]*x**mnk[1]*y**mnk[2]*z**mnk[3]\n \n return [temp/mnk[0], temp*np.log(x),\n temp*np.log(y), temp*np.log(z)]\n \n y=np.matrix(noisy).T \n X = np.matrix([[1] + f_x(point) for point in design])\n left = np.dot(X.T, X)\n right = np.dot(X.T, y)\n estim = np.linalg.solve(left, right)\n\n\n return estim\n\ndef get_norm(real, mnk):\n res = 0\n for i in range(len(real)):\n res+=(real[i]-mnk[i])**2\n return np.sqrt(res)\n\nN = 200\nres = 0\n\nfor i in range(N):\n y, noisy = make_data(real, init)\n estima = get_true_MNK(noisy, init)\n res+=get_norm(real, estima)\nprint(res/N)\n\nN = 200\nres = 0\nfor i in range(N):\n y, noisy = make_data(real, optimal)\n estima = get_true_MNK(noisy, optimal)\n res+=get_norm(real, estima)\nprint(res/N)\n#оценивает на основе плана эксперимента\n\n \n" }, { "alpha_fraction": 0.4593004286289215, "alphanum_fraction": 0.49035632610321045, "avg_line_length": 32.39325714111328, "blob_id": "2eb750a18cf2f2216902766c64110984079b8e5d", "content_id": "5894daf731a50e7f2e838e2fa3b1b4149401c6fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3586, "license_type": "no_license", "max_line_length": 82, "num_lines": 89, "path": "/main.py", "repo_name": "kamushekp/Optimal-Experiments", "src_encoding": "UTF-8", "text": "from fun import*\r\nfrom itertools import *\r\ndef main():\r\n \r\n #part 1\r\n axe = np.linspace(1, 10, 20)\r\n take = set()\r\n comb = list(combinations(axe, 3))\r\n while len(take) < ct.n:\r\n take.add(np.random.randint(0, len(comb)))\r\n \r\n\r\n axe = [comb[elem] for elem in take]\r\n dots = axe\r\n teta = [[1,1,1,1], [2,2,2,2], [3,3,3,3]]\r\n X, T, Res = respond(teta, dots)\r\n \r\n scale = [0.1*np.mean(Res[:, i])for i in range(3)] \r\n print(scale)\r\n E, cov, cor = make_noise(*scale)\r\n Work = E+Res\r\n print('Матрица истинных оценок параметров\\n{}\\n\\n'.format(T))\r\n\r\n \r\n #part 2\r\n mnk = get_MNK(Work, X)\r\n print('\\n\\n\\nМНК-оценки матрицы параметров\\n{}'.format(mnk))\r\n\r\n #part 3-4\r\n def hypot(param):\r\n print('проверим гипотезу о незначимости {} строк'.format(param))\r\n nu = len(param)\r\n if nu == 3:\r\n R = np.matrix([[0, 1, 0, 0],[0, 0, 1, 0], [0, 0, 0, 1]]) \r\n U = np.zeros ((3, 3))\r\n \r\n l = lambda_stat(Work, X, R, U) \r\n print('\\n\\nЛямбда - статистика = {0}\\n\\\r\n Значение параметров с ограничениями\\n{1}\\n\\n'.format(l[1], l[0]))\r\n our, f = Bartlett(nu, l[1]) \r\n print('Преобразование Бартлетта: {0}\\n\\\r\n Значение квантили хи-2 распределения есть {1}'.format(our, f))\r\n \r\n our, chi = Rao(nu, l[1])\r\n print('Преобразование Рао: {0}\\n\\\r\n Значение квантили хи-квадрат распределения есть {1}'.format(our, chi))\r\n else:\r\n if nu == 1:\r\n R = np.matrix([0, 0, 0, 0])\r\n R[0, param[0]] = 1\r\n U = np.zeros ((1, 3))\r\n elif nu == 2:\r\n R = np.matrix([[0, 1, 0, 0],[0, 0, 1, 0]]) \r\n U = np.zeros ((2, 3))\r\n \r\n l = lambda_stat(Work, X, R, U) \r\n print('\\n\\nЛямбда - статистика = {0}\\n\\\r\n Значение параметров с ограничениями\\n{1}\\n\\n'.format(l[1], l[0]))\r\n our, f = Lambda_Fisher_converter(nu, l[1]) \r\n print('Преобразование лямбды к F - статистике: {0}\\n\\\r\n Значение квантили F- распределения есть {1}'.format(our, f))\r\n \r\n s = T_stat(Work, X, R, U)\r\n 
print('\\nЗначение статистики следа = {}'.format(s))\r\n our, chi = T_Chi_converter(nu, s)\r\n print('Преобразование статистики следа к хи-квадрат статистике: {0}\\n\\\r\n Значение квантили хи-квадрат распределения есть {1}'.format(our, chi))\r\n hypot([1])\r\n hypot([2])\r\n hypot([1, 2])\r\n hypot([1, 2, 3])\r\n \r\n #part 5\r\n print('Вычислим предсказанные значения откликов и остатки')\r\n prediction = np.dot(X, mnk) \r\n def draw(number):\r\n import matplotlib.pyplot as plt\r\n\r\n \r\n x_axe = [i for i in range(ct.n)]\r\n plt.plot(x_axe, Work[:,number], x_axe, prediction[:,number])\r\n\r\n \r\n \r\n plt.plot(x_axe, Work[:,number]-prediction[:,number])\r\n plt.show()\r\n draw(2)\r\nnp.random.seed(1234)\r\nmain()" }, { "alpha_fraction": 0.3929460644721985, "alphanum_fraction": 0.4796680510044098, "avg_line_length": 28.703702926635742, "blob_id": "a4d19581e85f1156dc8f32519ca7307b71c7940c", "content_id": "36906c5392b8262b0218cf56eded27c53529f283", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2410, "license_type": "no_license", "max_line_length": 70, "num_lines": 81, "path": "/laba1.py", "repo_name": "kamushekp/Optimal-Experiments", "src_encoding": "UTF-8", "text": "import numpy as np\n\n\ndef laba1(m, design, weights): \n M = np.zeros((m, m))\n for point, weight in zip(design, weights):\n fx = np.matrix([point**i for i in range(m)])\n M += weight*np.dot(fx.T, fx)\n \n def D_cr():\n return np.linalg.det(M)\n \n \n def A_cr():\n return np.trace(np.linalg.inv(M))\n \n \n def E_cr():\n return min(np.linalg.eig(M)[0])\n \n \n def F2_cr():\n temp = np.linalg.matrix_power(M, -2)\n return np.sqrt(np.trace(temp)/m)\n \n \n def Lambda_cr():\n lambdi = np.linalg.eig(M)\n l_mean = np.mean(lambdi[0])\n return sum([(x - l_mean)**2 for x in lambdi[0]])\n \n \n def MV_cr():\n return max([np.linalg.inv(M)[i, i] for i in range(m)])\n \n \n def G_cr():\n def d_x(x):\n if x < -1 or x > 1: \n return 0\n else:\n f_x = np.matrix([x**i for i in range(m)])\n dx = np.dot(f_x, np.linalg.inv(M))\n dx = np.dot(dx, f_x.T)\n return float(-dx)\n \n \n from scipy.optimize import minimize_scalar\n import matplotlib.pyplot as plt\n t = np.arange(-2., 2., 0.02)\n plt.plot(t, [-d_x(elem) for elem in t], '.')\n print(minimize_scalar(d_x))\n return -minimize_scalar(d_x).fun\n\n all_criteria = [D_cr, A_cr, E_cr, F2_cr, Lambda_cr, MV_cr, G_cr]\n result = [float(cr()) for cr in all_criteria]\n print(result)\n#laba1(5,[-1,-0.707,0,0.707,1],[0.093,0.248,0.3178,0.248,0.093])\n#laba1(5,[-1,-0.683,0,0.683,1],[0.107,0.25,0.286,0.25,0.107])\n#laba1(5,[-1,-0.7379,0,0.7379,1],[0.1092,0.2513,0.2785,0.2513,0.1092])\n#laba1(5,[-1,-0.7,0,0.7,1],[0.2,0.2,0.2,0.2,0.2])\nlaba1(5,[-1,-0.707,0,0.707,1],[0.225, 0.2, 0.15, 0.2, 0.225]) \ndef task4():\n \n def F2_crit(p):\n design = [-1, -0.707, 0, 0.707, 1]\n weights = [1.5*p, 0.5-2*p, p, 0.5-2*p, 1.5*p]\n M = np.zeros((5, 5))\n for point, weight in zip(design, weights):\n fx = np.matrix([point**i for i in range(5)])\n M += weight*np.dot(fx.T, fx)\n \n \n temp = np.linalg.matrix_power(M,-2)\n return np.sqrt(np.trace(temp)/5)\n \n \n import matplotlib.pyplot as plt\n t = np.arange(0.001, 0.25, 0.001)\n plt.plot(t,[F2_crit(elem)for elem in t])\n#task4() \n" }, { "alpha_fraction": 0.5452685356140137, "alphanum_fraction": 0.5624040961265564, "avg_line_length": 26.143884658813477, "blob_id": "81181337a6679abbb15484c9ba2db80737879770", "content_id": "61f5f845de4b810d80deca73633455b75bdd2599", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 4454, "license_type": "no_license", "max_line_length": 75, "num_lines": 139, "path": "/fun.py", "repo_name": "kamushekp/Optimal-Experiments", "src_encoding": "UTF-8", "text": "import numpy as np\r\nfrom scipy.stats import distributions\r\nimport const as ct\r\n'''\r\nНотации\r\n\r\nРассматривается традиционная модель с одним\r\nнабором регрессоров для всех откликов.\r\n\r\nНаблюдение - значение многомерной фукции\r\nв точке.\r\nОтклик - совокупность нескольких наблюдений.\r\nq-мерный отклик - несколько наборов наблюдений.\r\n\r\n'''\r\n\r\n\r\n#вектор-строка регрессоров\r\ndef regrs(x, y, z):\r\n return np.matrix([1, x, y, z*z])\r\n\r\n\r\n#матрица истинных откликов \r\ndef respond(teta, dots):\r\n X = X_matrix(dots)\r\n teta = teta_matrix(teta)\r\n res = np.dot(X, teta)\r\n return X, teta, res\r\n\r\n#матрица истинных параметров\r\ndef teta_matrix(tetas):\r\n res = np.zeros((ct.m, ct.q))\r\n for i in range(ct.q):\r\n res[:,i] = tetas[i]\r\n return res\r\n\r\ndef X_matrix(dots):\r\n res = np.empty((ct.n, ct.m))\r\n for i in range(ct.n):\r\n res[i] = regrs(*dots[i])\r\n return res\r\n\r\n#Матрица шумов согласно варианту задания\r\ndef make_noise(sc1, sc2, sc3):\r\n E = np.empty((ct.n, ct.q))\r\n E[:, 0] = np.random.normal(loc = 0, scale = sc1, size = ct.n)\r\n E[:, 1] = E[:, 0] + np.random.normal(loc = 0, scale = sc2, size = ct.n)\r\n E[:, 2] = E[:, 0]+ np.random.normal(loc = 0, scale = sc3, size = ct.n)\r\n a = np.vstack((E[:, 0],E[:, 1], E[:, 2]))\r\n \r\n return E, np.cov(a), np.corrcoef(a) \r\n\r\n\r\n\r\n#МНК-оценка параметров\r\ndef get_MNK(Y, X):\r\n left = np.dot(X.T, X)\r\n right = np.dot(X.T, Y)\r\n return np.linalg.solve(left,right)\r\n\r\n#Многомерная остаточная сумма квадратов \r\ndef RSS(Y, X, Teta):\r\n temp = np.dot(X, Teta)\r\n temp = Y - temp\r\n return np.dot(temp.T, temp)\r\n \r\n\r\n#Задаются ограничения на параметры \r\n#R*teta = U\r\n#[VxM]*[MxQ] = [VxQ]\r\n#V - количество ограничений\r\ndef get_MNK_limited(Y, X, R, U):\r\n teta = get_MNK(Y, X)\r\n fish = np.dot(X.T, X)\r\n fish = np.linalg.inv(fish)\r\n \r\n right = np.dot(R, fish)\r\n right = np.dot(right, R.T)\r\n right = np.linalg.inv(right)\r\n fish = np.dot(fish, R.T)\r\n right = np.dot(fish, right)\r\n right = np.dot(right, U - np.dot(R, teta))\r\n res = right+teta\r\n return res\r\n \r\n#статистика Уилкса - максимального правдоподобия\r\ndef lambda_stat(Y, X, R, U):\r\n rss = RSS(Y, X, get_MNK(Y, X))\r\n Lambda_limited = get_MNK_limited(Y, X, R, U)\r\n rss_r = RSS(Y, X, Lambda_limited)\r\n rss = np.linalg.det(rss)\r\n rss_r = np.linalg.det(rss_r)\r\n return Lambda_limited, rss/rss_r\r\n\r\n#статистика следа Лоули-Хотеллинга\r\ndef T_stat(Y, X, R, U):\r\n rss = RSS(Y, X, get_MNK(Y, X))\r\n rss_r = RSS(Y, X, get_MNK_limited(Y, X, R, U))\r\n return np.trace(np.dot(np.linalg.inv(rss), rss_r)) - ct.q\r\n \r\ndef Lambda_Fisher_converter(nu, Lambda):\r\n \r\n expr = ct.n - ct.m - ct.q + 1\r\n temp = expr / ct.q\r\n if nu == 1:\r\n temp *= (1-Lambda)/Lambda\r\n f = distributions.f.ppf(0.095, ct.q, expr)\r\n elif nu == 2:\r\n sq = np.sqrt(Lambda)\r\n temp *= (1-sq)/sq\r\n f = distributions.f.ppf(0.095, 2*ct.q, 2*expr)\r\n return temp, f\r\n \r\ndef T_Chi_converter(nu, t):\r\n if nu == 1:\r\n chi = distributions.f.ppf(0.095, ct.q, ct.n - ct.m - ct.q + 1)\r\n return chi, (ct.n - ct.m - ct.q + 1) * t/ct.q\r\n else:\r\n \r\n chi = distributions.chi2.ppf(0.095, ct.q * nu)\r\n return chi, (ct.n - ct.m - ct.q + 1) * t\r\n \r\n \r\n#Барлетта аппроксимация лямбда 
критерия\r\ndef Bartlett(nu, Lambda):\r\n chi = distributions.chi2.ppf(0.095,ct.q * nu)\r\n temp = (ct.q - nu + 1) / 2 + ct.m\r\n return (temp - ct.n) * np.log(Lambda), chi\r\n\r\n#Рао аппроксимация лямбда критерия\r\ndef Rao(nu, Lambda):\r\n beta = ct.n - ct.m - (ct.q - nu + 1) / 2\r\n gamma = np.sqrt((ct.q*ct.q*nu*nu - 4) / (ct.q*ct.q+nu*nu - 5))\r\n sigma = (ct.q * nu - 2) / 4\r\n \r\n temp = np.power(Lambda, 1/gamma)\r\n temp = (1 - temp) / temp * (beta*gamma - 2* sigma) / ct.q * nu\r\n f = distributions.f.ppf(0.095, ct.q * nu, beta * gamma - 2*sigma)\r\n return temp, f" }, { "alpha_fraction": 0.4845360815525055, "alphanum_fraction": 0.5151926279067993, "avg_line_length": 28.479339599609375, "blob_id": "91d75283ccf26f454aeecd9c8d69421042a8026e", "content_id": "39df59a1d60dd4bbf04776e908903a0fafd0e26c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3837, "license_type": "no_license", "max_line_length": 152, "num_lines": 121, "path": "/fedorov_alg.py", "repo_name": "kamushekp/Optimal-Experiments", "src_encoding": "UTF-8", "text": "from scipy import optimize\r\nimport numpy as np\r\nfrom Design import Design\r\n'''ВНИМАНИЕ генерация случайных чисел пока только от 0 до 1'''\r\n\r\nfrom itertools import product\r\n\r\n\r\ndef laba2():\r\n \r\n def get_f(x,y):\r\n return np.matrix([1,x,y,x*x,y*y,x*y,x*x*y,x*y*y,x*x*x,y*y*y]).T\r\n\r\n\r\n def M(plan,weights):\r\n N=len(plan) \r\n M=np.zeros((10,10))\r\n for i in range(N):\r\n\r\n f_x=get_f(plan[i][0], plan[i][1])\r\n M+=weights[i]*np.dot(f_x, f_x.T)\r\n \r\n return M\r\n \r\n \r\n \r\n\r\n\r\n\r\n def to_minimize(x0):\r\n Fisher=M(design.points, design.weights)\r\n f_x=get_f(x0[0], x0[1]) \r\n M_inv=np.linalg.inv(Fisher)\r\n \r\n \r\n dx=np.dot(f_x.T,M_inv)\r\n dx=np.dot(dx,f_x)\r\n #print('{0}, {1}, {2}'.format(x0[0], x0[1], float(dx)))\r\n return float(-dx)\r\n\r\n \r\n init=[-1,-0.75,-0.5,0,0.5,0.75,1]\r\n initial_design=list([*elem] for elem in product(init,init))\r\n initial_weights=[1/49. 
for i in range(49)] \r\n\r\n design=Design(initial_design,initial_weights) \r\n\r\n ostanov = -11 \r\n i=0\r\n f = open('laba 2 continue.txt', 'w')\r\n while(-ostanov-10>0.01): \r\n i+=1\r\n alpha=0.5\r\n rranges = ((-1, 1), (-1, 1))\r\n resbrute = optimize.brute(func = to_minimize,ranges = rranges, Ns = 100, finish = None)\r\n ostanov = to_minimize([*resbrute])\r\n\r\n newDet = 1\r\n oldDet = 2\r\n oldWeights = design.weights.copy()\r\n oldPoints = design.points.copy()\r\n while newDet < oldDet:\r\n alpha/=2\r\n design.weights = oldWeights.copy()\r\n design.points = oldPoints.copy()\r\n oldDet = np.linalg.det(M(design.points, design.weights))\r\n design.anpcow([*resbrute],alpha)\r\n newDet = np.linalg.det(M(design.points, design.weights))\r\n \r\n print('\\n\\n\\n Итерация{3} \\n Добавляемая точка - {0} с весом {1} \\n Текущее значение критерия {2}\\n'.format(resbrute, alpha, ostanov, i))\r\n f.write('\\n\\n\\n Итерация{3} \\n Добавляемая точка - {0} с весом {1} \\n Текущее значение критерия {2}\\n'.format(resbrute, alpha, ostanov, i))\r\n \r\n #design.find_nonc(0.01)\r\n \r\n design.set_control(2,0.01)\r\n Weights = ['%.2e'%elem for elem in design.weights]\r\n print([elem for elem in zip(design.points, Weights)])\r\n res = [elem for elem in zip(design.points, Weights)]\r\n f.write('\\n'.join(str(e) for e in res))\r\n\r\n \r\nlaba2()\r\n'''\r\n\r\ndef round_design(design,prec):\r\n design=list(map(lambda x:float(round(x,prec)),design))\r\n return design\r\n \r\n\r\ndef fed_alg(purpose,x0_design,kernel,p,h):\r\n design=Design(x0_design)\r\n \r\n def minimax(dot):\r\n dot=float(dot)\r\n d=dx(purpose,design,dot,p,h,kernel)\r\n w=K((dot-purpose)/h,mode=kernel)/h\r\n return float(-w*d)\r\n\r\n for i in range(10): \r\n a=np.random.rand()*(-1)**np.random.randint(1,3)\r\n \r\n f=minimize(minimax,a,method='Nelder-Mead',\\\r\n tol=1e-6,options={'maxiter': 1e+8, 'maxfev': 1e+8})\r\n sig=-f.fun-p-1#???????????\r\n alfa=sig/(sig+p)/(p+1) \r\n print(alfa)\r\n design.anpcow(float(f.x),alfa)\r\n design.set_control(2,0.01)\r\n design.find_nonc(0.01)\r\n print ('iteration')\r\n design.set_control(2,0.01)\r\n design.set_control(2,0.01)\r\n design.find_nonc(0.01)\r\n res=list(zip(design.points,design.weights))\r\n def s(elem):\r\n return elem[1]\r\n \r\n res=sorted(res,key=s)\r\n print(res)\r\n \r\n'''" }, { "alpha_fraction": 0.48096564412117004, "alphanum_fraction": 0.5116063356399536, "avg_line_length": 20.520000457763672, "blob_id": "4f9976ce6b05b8cb749dcbfd87824c2f41768a7e", "content_id": "c6bff50249571231423ae873cc9cf8e160b53d6e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1111, "license_type": "no_license", "max_line_length": 68, "num_lines": 50, "path": "/laba4_MNK.py", "repo_name": "kamushekp/Optimal-Experiments", "src_encoding": "UTF-8", "text": "import numpy as np\nimport itertools as it\nimport random as rd\n\n\n\ndef make_data(rc, factors):\n '''factors is array like ([x1, x2, x3], [x1, x2, x3],...)''' \n \n N = len(factors)\n y = []\n\n \n def get_out(x1, x2, x3):\n return rc[0] * (x1**rc[1]) * (x2**rc[2]) * (x3**rc[3])\n \n for i in range(N):\n x1 = factors[i, 0]\n x2 = factors[i, 1]\n x3 = factors[i, 2]\n \n y.append(get_out(x1, x2, x3))\n \n std= np.std(y)\n std*=std\n std*=0.1\n std=np.sqrt(std)\n y_with_noise = [elem+rd.normalvariate(0, std) for elem in y]\n return y, y_with_noise\n \n\n\ndef get_linear_MNK(noisy, init):\n #пункты до 3 включительно\n\n \n #МНК\n \n def f_x(point):\n #point is (x1, x2, x3)\n res = [np.log(elem) 
for elem in point]\n return res\n \n y=np.matrix([np.log(elem) for elem in noisy]).T \n X = np.matrix([[1] + f_x(point) for point in init])\n left = np.dot(X.T, X)\n right = np.dot(X.T, y)\n estim = np.linalg.solve(left, right)\n #МНК работает\n return estim\n\n" } ]
6
shreyaskuthe/regression
https://github.com/shreyaskuthe/regression
21b91fbbda78b1075562b3e440a7b765ee4f2c56
913d17d9be6ef2852e96c2114d29eae6970b9b4e
3adeb0094c90cf1a70851d66228e8f1648af68de
refs/heads/master
2022-11-27T12:41:16.734445
2020-07-15T09:40:00
2020-07-15T09:40:00
279,826,733
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6696174740791321, "alphanum_fraction": 0.6968068480491638, "avg_line_length": 25.615720748901367, "blob_id": "92ae4c1e81c847a71a3191418145472ad7bdfe69", "content_id": "6b4669365df2a5727d4c07e9f4fe0a1f97cc4c81", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6326, "license_type": "no_license", "max_line_length": 99, "num_lines": 229, "path": "/linear regression.py", "repo_name": "shreyaskuthe/regression", "src_encoding": "UTF-8", "text": "import pandas as pd\r\ndata=pd.read_csv(r'E:\\DATA SCIENCE\\imarticus\\python\\datasets\\Advertising.csv',index_col=0,header=0)\r\nprint(data.head())\r\n#%%%\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n#%%%\r\nprint(data.dtypes)\r\nprint(data.shape)\r\nprint(data.describe())\r\n#%%%\r\ndata.boxplot(column='TV')\r\n#%%%\r\ndata.boxplot(column='radio')\r\n#%%%\r\ndata.boxplot(column='newspaper')\r\n#%%%\r\nsns.pairplot(data,x_vars=['TV','radio','newspaper'],y_vars='sales',\r\n kind='reg')\r\n#%%%\r\n#create X and Y\r\nX=data[['TV','radio','newspaper']]\r\nY=data['sales']\r\n#%%%\r\nsns.distplot(Y,hist=True)\r\n#%%%\r\n\"\"\"\r\n#log transformation\r\nimport numpy as np\r\nY_log=np.log(Y)\r\n\"\"\"\r\n#sns.distplot(Y_log,hist=True)\r\n#%%%\r\nX.hist(bins=20)\r\n#%%%\r\nfrom scipy.stats import skew\r\ndata_num_skew = X.apply(lambda x: skew(x.dropna()))\r\ndata_num_skewed = data_num_skew[(data_num_skew > .75) | (data_num_skew < -.75)]\r\n\r\nprint(data_num_skew)\r\nprint(data_num_skewed)\r\nimport numpy as np\r\n# apply log + 1 transformation for all numeric features with skewness over .75\r\nX[data_num_skewed.index] = np.log1p(X[data_num_skewed.index])\r\n#%%%\r\nX.hist(bins=50)\r\n#%%%\r\nimport seaborn as sns\r\ncorr_df=X.corr(method='pearson')\r\nprint(corr_df)\r\nplt.figure(figsize=(5,5))\r\nsns.heatmap(corr_df,vmax=1.0,vmin=-1.0,annot=True)\r\nplt.show()\r\n#%%%\r\nfrom statsmodels.stats.outliers_influence import variance_inflation_factor as vif\r\n\r\nvif_df = pd.DataFrame()\r\nvif_df[\"features\"] = X.columns\r\nvif_df[\"VIF Factor\"] = [vif(X.values, i) for i in range(X.shape[1])]\r\nvif_df.round(2)\r\n#%%%\r\nfrom sklearn.model_selection import train_test_split\r\n#Split the data into test and train\r\nX_train,X_test,Y_train,Y_test=train_test_split(X,Y,test_size=0.2,random_state=10)\r\n#%%%\r\nfrom sklearn.linear_model import LinearRegression\r\n#create a model object\r\nlm=LinearRegression()\r\n#train the model object\r\nlm.fit(X_train,Y_train)\r\n#print intercept and coefficients\r\nprint(lm.intercept_)\r\nprint(lm.coef_)\r\n#%%%\r\n#pair the feature names with the coefficients\r\nprint(list(zip(X.columns,lm.coef_)))\r\n#%%%\r\nX1=100\r\nX2=100\r\nX3=50\r\ny_pred=3.353291385815151+(0.0437425*X1)+(0.19303708*X2)+(-0.04895137*X3)\r\nprint(y_pred)\r\n#%%%\r\n#predict using the model\r\nY_pred=lm.predict(X_test)\r\nprint(Y_pred)\r\n#%%%\r\nnew_df=pd.DataFrame()\r\nnew_df=X_test\r\nnew_df['Actual sales']=Y_test\r\nnew_df['Predicted sales']=Y_pred\r\nnew_df\r\n#%%%\r\nfrom sklearn.metrics import r2_score,mean_squared_error\r\nimport numpy as np\r\nr2=r2_score(Y_test,Y_pred)\r\nprint(r2)\r\nrmse=np.sqrt(mean_squared_error(Y_test,Y_pred))\r\nprint(rmse)\r\nadjusted_r_squared = 1 - (1-r2)*(len(Y)-1)/(len(Y)-X.shape[1]-1)\r\nprint(adjusted_r_squared)\r\n#%%%\r\nprint(min(Y_test))\r\nprint(max(Y_test))\r\n#%%%\r\nnew_df=pd.DataFrame()\r\nnew_df=X_train\r\nnew_df['sales']=Y_train\r\nnew_df.shape\r\n#%%%\r\nimport statsmodels.formula.api as 
sm\r\n#create a fitted model with all three features\r\nlm_model=sm.ols(formula='sales~TV+radio+newspaper',data=new_df).fit()\r\n#print the coefficients\r\nprint(lm_model.params)\r\nprint(lm_model.summary())\r\n#%%%\r\nY_pred_new=lm_model.predict(X_test)\r\nfrom sklearn.metrics import r2_score,mean_squared_error\r\nimport numpy as np\r\nr2=r2_score(Y_test,Y_pred)\r\nprint(r2)\r\nrmse=np.sqrt(mean_squared_error(Y_test,Y_pred))\r\nprint(rmse)\r\nadjusted_r_squared = 1 - (1-r2)*(len(Y)-1)/(len(Y)-X.shape[1]-1)\r\nprint(adjusted_r_squared)\r\n#%%%\r\nnew_df1=pd.DataFrame()\r\nnew_df1=X_test\r\nnew_df1['sales']=Y_test\r\nnew_df1.shape\r\n#%%%\r\nimport statsmodels.formula.api as sm\r\n#create a fitted model with all three features\r\nlm_model1=sm.ols(formula='sales~TV+radio+newspaper',data=new_df1).fit()\r\n#print the coefficients\r\nprint(lm_model1.params)\r\nprint(lm_model1.summary())\r\n#%%%\r\nnew_df2=pd.DataFrame()\r\nnew_df2=X_train\r\nnew_df2['sales']=Y_train\r\nnew_df2.shape\r\n#%%%\r\nimport statsmodels.formula.api as sm\r\n#create a fitted model with all three features\r\nlm_model2=sm.ols(formula='sales~TV+radio',data=new_df2).fit()\r\n#print the coefficients\r\nprint(lm_model2.params)\r\nprint(lm_model2.summary())\r\n#%%%\r\nnew_df3=pd.DataFrame()\r\nnew_df3=X_test\r\nnew_df3['sales']=Y_test\r\nnew_df3.shape\r\n#%%%\r\nimport statsmodels.formula.api as sm\r\n#create a fitted model with all three features\r\nlm_model3=sm.ols(formula='sales~TV+radio+newspaper',data=new_df3).fit()\r\n#print the coefficients\r\nprint(lm_model3.params)\r\nprint(lm_model3.summary())\r\n#%%%\r\nY_pred_new=lm_model.predict(X_test)\r\nfrom sklearn.metrics import r2_score,mean_squared_error\r\nimport numpy as np\r\nr2=r2_score(Y_test,Y_pred)\r\nprint(r2)\r\nrmse=np.sqrt(mean_squared_error(Y_test,Y_pred))\r\nprint(rmse)\r\nadjusted_r_squared = 1 - (1-r2)*(len(Y)-1)/(len(Y)-X.shape[1]-1)\r\nprint(adjusted_r_squared)\r\n#%%%\r\nplot_lm_1 = plt.figure(1)\r\nplot_lm_1.set_figheight(8)\r\nplot_lm_1.set_figwidth(12)\r\n\r\n# fitted values (need a constant term for intercept)\r\nmodel_fitted_y = lm_model.fittedvalues\r\n\r\nplot_lm_1.axes[0] = sns.residplot(model_fitted_y, 'sales', data=new_df, lowess=True)\r\n\r\nplot_lm_1.axes[0].set_title('Residuals vs Fitted')\r\nplot_lm_1.axes[0].set_xlabel('Fitted values')\r\nplot_lm_1.axes[0].set_ylabel('Residuals')\r\n#%%%\r\nimport statsmodels.api as stm\r\nimport scipy.stats as stats\r\nfig = stm.qqplot(fit=True, line='45')\r\nplt.title('Normal Q-Q')\r\nplt.xlabel('Theoretical Quantiles')\r\nplt.ylabel('Standardized Residuals')\r\nplt.show()\r\n\r\n#%%%\r\n# normalized residuals\r\nmodel_norm_residuals = lm_model.get_influence().resid_studentized_internal\r\n# absolute squared normalized residuals\r\nmodel_norm_residuals_abs_sqrt = np.sqrt(np.abs(model_norm_residuals))\r\n\r\nplot_lm_3 = plt.figure(3)\r\nplot_lm_3.set_figheight(8)\r\nplot_lm_3.set_figwidth(12)\r\nplt.scatter(model_fitted_y, model_norm_residuals_abs_sqrt, alpha=0.5)\r\nsns.regplot(model_fitted_y, model_norm_residuals_abs_sqrt, lowess=True)\r\n\r\n\r\nplot_lm_3.axes[0].set_title('Scale-Location')\r\nplot_lm_3.axes[0].set_xlabel('Fitted values')\r\nplot_lm_3.axes[0].set_ylabel('$\\sqrt{|Standardized Residuals|}$')\r\n#%%%\r\nfrom sklearn.model_selection import train_test_split\r\n#Split the data into test and train\r\nX_train,X_test,Y_train,Y_test=train_test_split(X,Y,test_size=0.2,random_state=10)\r\n#%%%\r\nfrom sklearn.linear_model import Ridge\r\nlm=Ridge()\r\nlm.fit(X_train,Y_train)\r\n#print intercept and 
coefficients\r\nprint(lm.intercept_)\r\nprint(lm.coef_)\r\n#%%%\r\nfrom sklearn.linear_model import Lasso\r\nlm=Lasso()\r\nlm.fit(X_train,Y_train)\r\nprint(lm.intercept_)\r\nprint(lm.coef_)\r\n#%%%\r\n\r\n" } ]
1
dcpssc/thub-microservices
https://github.com/dcpssc/thub-microservices
23dfa66c0cfbbb58657d10e2bbbfa65fdeed9316
15bd23c3ba1c93b1c491fe342056651d034ac48e
57feaced37c091f7442cd42c1ab54775b8fef768
refs/heads/master
2016-08-24T02:57:20.253509
2016-07-30T23:19:03
2016-07-30T23:19:03
63,631,199
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.686956524848938, "alphanum_fraction": 0.7565217614173889, "avg_line_length": 22.200000762939453, "blob_id": "de887e11848d7a9f97746ddf05d078d69607e122", "content_id": "4a6b15db4345aeb92c061791c5020c91ecca39c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 115, "license_type": "no_license", "max_line_length": 46, "num_lines": 5, "path": "/run.py", "repo_name": "dcpssc/thub-microservices", "src_encoding": "UTF-8", "text": "from myapp import scheduler\nscheduler.start()\n\nfrom myapp import app\napp.run(host='0.0.0.0', port=5000, debug=True)" }, { "alpha_fraction": 0.7393364906311035, "alphanum_fraction": 0.7535545229911804, "avg_line_length": 20, "blob_id": "f36a2427e32ef3a45c78bb1520bc6d9b64235df6", "content_id": "1578fe058923b8702acf659c6296f51b6949a4ad", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 211, "license_type": "no_license", "max_line_length": 36, "num_lines": 10, "path": "/test.py", "repo_name": "dcpssc/thub-microservices", "src_encoding": "UTF-8", "text": "from nameko.rpc import rpc, RpcProxy\nfrom nameko.timer import timer\nimport time\n\nclass Microservice1(object):\n\tname = 'microservice1'\n\n\t@timer(interval=5)\n\tdef wakeup_service(self):\n\t\tprint \"-> wakeup_service\"\n\n" }, { "alpha_fraction": 0.784140944480896, "alphanum_fraction": 0.784140944480896, "avg_line_length": 19.727272033691406, "blob_id": "86f65f9a303853ad886883c43068185e416716a7", "content_id": "12f2fdf97729f65615f3786979a0bb579d021171", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 227, "license_type": "no_license", "max_line_length": 41, "num_lines": 11, "path": "/myapp/__init__.py", "repo_name": "dcpssc/thub-microservices", "src_encoding": "UTF-8", "text": "from flask import Flask\n\napp = Flask(__name__)\n\nfrom flask_apscheduler import APScheduler\nfrom myapp.jobs import Config\napp.config.from_object(Config())\nscheduler = APScheduler()\nscheduler.init_app(app)\n\nfrom myapp import views" }, { "alpha_fraction": 0.7121211886405945, "alphanum_fraction": 0.760606050491333, "avg_line_length": 35.77777862548828, "blob_id": "a3fde225f9b2cbc245e9a7958eecbf7f0a7b5bf8", "content_id": "25960f6139a6e07c3a3c370f0fd286cb365f4650", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 330, "license_type": "no_license", "max_line_length": 119, "num_lines": 9, "path": "/wsgi.py", "repo_name": "dcpssc/thub-microservices", "src_encoding": "UTF-8", "text": "from myapp import scheduler\nscheduler.start()\n\nfrom myapp import app\napp.run(host='0.0.0.0')\n\n# uwsgi --http :8888 --module wsgi --callable app\n# https://www.digitalocean.com/community/tutorials/how-to-serve-flask-applications-with-uwsgi-and-nginx-on-ubuntu-14-04\n# virtualenv venv --python=/usr/local/lib/python2.7.10/bin/python" }, { "alpha_fraction": 0.6880000233650208, "alphanum_fraction": 0.7139999866485596, "avg_line_length": 16.543859481811523, "blob_id": "b07bff3ad220ef3b3f9fb84e742ceb88cea295bb", "content_id": "127de9efa53658d950acb5160283a88d4146b50a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1000, "license_type": "no_license", "max_line_length": 105, "num_lines": 57, "path": "/README.md", "repo_name": "dcpssc/thub-microservices", "src_encoding": "UTF-8", "text": "# Transit-Hub Micro Services\n\n## Key Components\n* 
[Flask](http://flask.pocoo.org/): A lightweight web framework\n* [Nameko](https://github.com/onefinestay/nameko): A microservice framework\n* [RabbitMQ](https://www.rabbitmq.com/): A robust message-oriented middleware\n\n## Running the code\n\nClone the repository\n\n```\n$ git clone [this repo url]\n```\n\nCreate and activate a virtual environment\n\n```\n$ virtualenv venv\n$ source ./venv/bin/activate\n```\n\nInstall the frameworks and dependencies in requirements.txt\n\n```\n$ pip install -r requirements.txt\n```\n\nInstall Docker\n\n```\nhttps://www.docker.com/\n```\n\nRun the official docker container for RabbitMQ\n\n```\n$ docker run -d --hostname my-rabbit --name some-rabbit -p 15672:15672 -p 5672:5672 rabbitmq:3-management\n```\n\nRun the microservice 1\n\n```\n$ nameko run microservice_1 --broker amqp://guest:guest@localhost\n```\n\nRun the Flask server\n\n```\n$ python run.py\n```\n\nTest the microservice in browser, it should return 6\n\n```\nhttp://localhost:5000/process\n```\n" }, { "alpha_fraction": 0.5231481194496155, "alphanum_fraction": 0.7037037014961243, "avg_line_length": 17.08333396911621, "blob_id": "76a5fda043cccc0ea32cb12e4249359fddb16645", "content_id": "1d27137cb2698312191a6802ad6b268aadf0e7dc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 216, "license_type": "no_license", "max_line_length": 29, "num_lines": 12, "path": "/requirements.txt", "repo_name": "dcpssc/thub-microservices", "src_encoding": "UTF-8", "text": "nameko==2.3.1\nflask_apscheduler==1.3.7\npymongo==3.2.2\nrequests==2.9.1\nuwsgi==2.0.12\ngtfs-realtime-bindings==0.0.4\nprotobuf_to_dict==0.1.0\npykalman==0.9.5\nnumpy==1.10.1\nnumpydoc==0.5\nscipy==0.16.1\nscikit-learn==0.16.1" }, { "alpha_fraction": 0.7267080545425415, "alphanum_fraction": 0.7267080545425415, "avg_line_length": 15.100000381469727, "blob_id": "b4d1768cdf280336765c6579745f2cd428209968", "content_id": "7adec2da1f71958c67ef0fdd7b050d0e070ae0dc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 161, "license_type": "no_license", "max_line_length": 41, "num_lines": 10, "path": "/myapp/jobs.py", "repo_name": "dcpssc/thub-microservices", "src_encoding": "UTF-8", "text": "from flask_apscheduler import APScheduler\nimport datetime\nfrom myapp import views\n\n\nclass Config(object):\n JOBS = [\n ]\n\n SCHEDULER_VIEWS_ENABLED = True\n" }, { "alpha_fraction": 0.7364864945411682, "alphanum_fraction": 0.7387387156486511, "avg_line_length": 23.72222137451172, "blob_id": "54129501ec29fa6b4dca61d7b10b2268cb47cc49", "content_id": "ac931bd8c5f2c9749da63082b892a4e11a106b33", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 444, "license_type": "no_license", "max_line_length": 58, "num_lines": 18, "path": "/myapp/views.py", "repo_name": "dcpssc/thub-microservices", "src_encoding": "UTF-8", "text": "from myapp import app\nfrom flask import Flask, render_template, session, request\nimport time\nimport datetime\nfrom nameko.standalone.rpc import ClusterRpcProxy\n\[email protected]('/')\[email protected]('/index')\ndef index():\n\treturn \"home\"\n\nCONFIG = {'AMQP_URI': \"amqp://guest:guest@localhost\"}\n\[email protected]('/process')\ndef process():\n\twith ClusterRpcProxy(CONFIG) as cluster_rpc:\n\t\thello_res = cluster_rpc.process.run.async(5)\n\t\treturn str(hello_res.result())" }, { "alpha_fraction": 0.6851925849914551, "alphanum_fraction": 0.7041703462600708, "avg_line_length": 
38.071651458740234, "blob_id": "6146f763c0fa7b3a19adc6bd9b0d9ea199aabb4c", "content_id": "016002316066e6f59492ee659c7bd6d1f4d4f781", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12541, "license_type": "no_license", "max_line_length": 134, "num_lines": 321, "path": "/shared_segments.py", "repo_name": "dcpssc/thub-microservices", "src_encoding": "UTF-8", "text": "import pymongo\nfrom pymongo import MongoClient\nimport json\nfrom bson import json_util\nfrom bson.json_util import dumps\nimport ast\nimport datetime\nimport pytz\nimport time\nfrom bson.objectid import ObjectId\n# from datetime import datetime, date\nimport calendar\n# import pykalman\n# import numpy as np\n# import matplotlib.pyplot as pyplot\nimport math\nfrom math import radians, cos, sin, asin, sqrt\nimport thread\nimport uuid\n\n# MONGODB_HOST_REMOTE = '129.59.107.160'\nMONGODB_HOST_LOCAL = 'localhost'\n\nMONGODB_PORT = 27017\n\nDB_GTFS = 'nashville-mta22020160309'\nDB_GTFS_STOPS = 'stops'\nDB_GTFS_TRIPS = 'trips'\nDB_GTFS_SHAPES = 'shapes'\nDB_GTFS_ROUTES = 'routes'\nDB_GTFS_STOPTIMES = 'stoptimes'\n\nconnection = MongoClient(MONGODB_HOST_LOCAL, MONGODB_PORT)\n# col_gtfs_stops = connection[DB_GTFS][DB_GTFS_STOPS]\n# col_gtfs_trips = connection[DB_GTFS][DB_GTFS_TRIPS]\n# col_gtfs_routes = connection[DB_GTFS][DB_GTFS_ROUTES]\n# col_gtfs_shapes = connection[DB_GTFS][DB_GTFS_SHAPES]\n# col_gtfs_stoptimes = connection[DB_GTFS][DB_GTFS_STOPTIMES]\nlocal_test = connection[\"thub_segments_nashville-mta22020160309\"]['shared_coordinates']\n\nconnection = MongoClient(\"mongodb://writing:[email protected]:27017\")\nsegments_shared_coordinates = connection[\"thub_segments_\"+DB_GTFS][\"shared_coordinates\"]\nsegments_shared_segments = connection[\"thub_segments_\"+DB_GTFS][\"shared_segments\"]\nsegments_shapes = connection[\"thub_segments_\"+DB_GTFS][\"shapes\"]\n\n\n\nGRID_DISTANCE = 0.00897124867601\nDISTANCE_MAX = 1.0\nDISTANCE_MIN = 0.0\n\ndef resample_coor(lat, lon):\n\tlat, lon = round(lat, 4), round(lon, 4)\n\treturn [lat, lon]\n\ndef cal_distance(origin, destination):\n\tlat1, lon1 = origin\n\tlat2, lon2 = destination\n\tradius = 6367 # km\n\n\tdlat = math.radians(lat2-lat1)\n\tdlon = math.radians(lon2-lon1)\n\ta = math.sin(dlat/2) * math.sin(dlat/2) + math.cos(math.radians(lat1)) \\\n\t * math.cos(math.radians(lat2)) * math.sin(dlon/2) * math.sin(dlon/2)\n\tc = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))\n\tkm = radius * c\n\treturn km\n\ndef get_shapeids():\n\t# all shape ids\n\tarray_shape_id = []\n\ttpResults = col_gtfs_trips.find() \n\tfor tpResult in tpResults:\n\t\tif tpResult['shape_id'] not in array_shape_id:\n\t\t\tarray_shape_id.append(tpResult['shape_id'])\n\treturn array_shape_id\n\ndef cal_shapdids_sharing_cells():\n\tmap_coors = {}\n\t# with open('tmp.json', 'r') as fp:\n\t# \tmap_coors = json.load(fp)\n\t# \treturn map_coors\n\ttmp_limit = 0\n\tarray_shape_id = get_shapeids()\n\tfor shape_id in array_shape_id:\n\t\tprint \"->\",tmp_limit,\"/\",len(array_shape_id)\n\t\ttmp_limit+=1\n\t\tshape_id = str(shape_id)\n\t\ttpResults = col_gtfs_shapes.find({'shape_id':shape_id}) \\\n\t\t\t.sort([['shape_pt_sequence', pymongo.ASCENDING]])\n\t\tprevious_coor = [0,0]\n\t\t# loop through the coordinates in the shape\n\t\tfor tpResult in tpResults:\n\t\t\ttpResult = json.dumps(tpResult, default=json_util.default)\n\t\t\ttpResult = ast.literal_eval(tpResult)\n\t\t\tnew_coor = resample_coor(float(tpResult['shape_pt_lat']), float(tpResult['shape_pt_lon']))\n\t\t\t# 
lat, lon = resample_coor(tpResult['shape_pt_lat'], tpResult['shape_pt_lon'])\n\t\t\tdistance_from_origin = tpResult['shape_dist_traveled']\n\t\t\tif previous_coor!=[0,0]:\n\t\t\t\tdistance = cal_distance(new_coor, previous_coor)\n\t\t\t\tif distance > GRID_DISTANCE:\n\t\t\t\t\tsteps = int(distance/GRID_DISTANCE)\n\t\t\t\t\tlat_step = (new_coor[0]-previous_coor[0])/steps\n\t\t\t\t\tlon_step = (new_coor[1]-previous_coor[1])/steps\n\t\t\t\t\tfor i in range(1, steps+1, 1):\n\t\t\t\t\t\tnew_lat = new_coor[0]-i*lat_step\n\t\t\t\t\t\tnew_lon = new_coor[1]-i*lon_step\n\t\t\t\t\t\tnew_lat, new_lon = resample_coor(new_lat, new_lon)\n\t\t\t\t\t\tkey = \"%f,%f\" % (new_lat, new_lon)\n\t\t\t\t\t\tif key in map_coors:\n\t\t\t\t\t\t\tmap_coors[key][shape_id] = distance_from_origin\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tmap_coors[key] = {shape_id:distance_from_origin}\n\t\t\tkey = \"%f,%f\" % (new_coor[0], new_coor[1])\n\t\t\tif key in map_coors:\n\t\t\t\tif shape_id not in map_coors[key]:\n\t\t\t\t\tmap_coors[key][shape_id] = distance_from_origin\n\t\t\telse:\n\t\t\t\tmap_coors[key] = {shape_id:distance_from_origin}\n\t\t\tprevious_coor = [new_coor[0], new_coor[1]]\n\t# with open('tmp.json', 'w') as fp:\n\t# \tjson.dump(map_coors, fp)\n\tprint \"-> calculation is finshed\"\n\ttmp_limit = 0\n\tarray_result = []\n\tfor key, value in map_coors.iteritems():\n\t\tarray_result.append({\"coordinate\":key, \"shapes\":value})\n\t# \tprint \"->\",tmp_limit,\"/8756457\"\n\t# \ttmp_limit+=1\n\tprint \"-> inserting\"\n\tprint len(array_result)\n\tsegments_shared_coordinates.insert_many(array_result, ordered=False)\n\tprint \"-> finishing\"\n\t# return map_coors\n\n# cal_shapdids_sharing_cells()\ncache_coordinate_shared_coordinates = {}\n\ncol_gtfs_stops = connection[DB_GTFS][DB_GTFS_STOPS]\ncol_gtfs_trips = connection[DB_GTFS][DB_GTFS_TRIPS]\ncol_gtfs_routes = connection[DB_GTFS][DB_GTFS_ROUTES]\ncol_gtfs_shapes = connection[DB_GTFS][DB_GTFS_SHAPES]\ncol_gtfs_stoptimes = connection[DB_GTFS][DB_GTFS_STOPTIMES]\ndef filter_shapeids_same_direction(one_shape_id, array_shape_id):\n\ttpResults = col_gtfs_trips.find({\"shape_id\":{\"$in\": array_shape_id}})\n\tmap_shapeid_directionid = {}\n\tfor tpResult in tpResults:\n\t\tmap_shapeid_directionid[tpResult['shape_id']] = tpResult['direction_id']\n\tresult_shapeid = []\n\tfor shape_id in array_shape_id:\n\t\tif map_shapeid_directionid[one_shape_id] == map_shapeid_directionid[shape_id]:\n\t\t\tresult_shapeid.append(shape_id)\n\treturn result_shapeid\n\ndef calculate_segment_for_shapeid(shape_id):\n\ttpResults = col_gtfs_shapes.find({'shape_id':shape_id}) \\\n\t\t.sort([['shape_pt_sequence', pymongo.ASCENDING]])\n\n\tarray_tmp_shape_id = []\n\tarray_distance_segmentdetails = []\n\t# map_distance_shapeid_details = {}\n\tfor tpResult in tpResults:\n\t\ttpResult = json.dumps(tpResult, default=json_util.default)\n\t\ttpResult = ast.literal_eval(tpResult)\n\t\tcurrent_coor = resample_coor(float(tpResult['shape_pt_lat']), float(tpResult['shape_pt_lon']) )\n\t\tkey = \"%f,%f\" % (current_coor[0], current_coor[1])\n\t\tcurrent_distance = str(tpResult['shape_dist_traveled'])\n\t\tshared_shapes = None\n\t\tif key not in cache_coordinate_shared_coordinates:\n\t\t\tshared_shapes = segments_shared_coordinates.find_one({\"coordinate\":key})\n\t\t\tcache_coordinate_shared_coordinates[key] = shared_shapes\n\t\telse:\n\t\t\tshared_shapes = cache_coordinate_shared_coordinates[key]\n\t\tif shared_shapes:\n\t\t\tshared_shapes = json.dumps(shared_shapes, default=json_util.default)\n\t\t\tshared_shapes = 
ast.literal_eval(shared_shapes)\n\t\t\tshared_shapes = shared_shapes['shapes']\n\n\t\t\tarray_new_shape_id = filter_shapeids_same_direction(shape_id, shared_shapes.keys())\n\n\t\t\tif set(array_new_shape_id) != set(array_tmp_shape_id):\n\t\t\t\t# remove shapeid's that has only one \n\t\t\t\t# if len(array_distance_segmentdetails)>0:\n\n\n\t\t\t\t#\n\t\t\t\tarray_distance_segmentdetails.append({\"distance\":current_distance,\"segment_detail\":{} })\n\t\t\t\tfor one_shapeid in array_new_shape_id:\n\t\t\t\t\tarray_distance_segmentdetails[-1][\"segment_detail\"][one_shapeid] = [ shared_shapes[one_shapeid] ]\n\t\t\t\tlast_checkpoint_distance = current_distance\n\t\t\telse:\n\t\t\t\tfor one_shapeid in array_new_shape_id:\n\t\t\t\t\tif len( array_distance_segmentdetails[-1][\"segment_detail\"][one_shapeid])==1:\n\t\t\t\t\t\tif array_distance_segmentdetails[-1][\"segment_detail\"][one_shapeid][0] < shared_shapes[one_shapeid]:\n\t\t\t\t\t\t\tarray_distance_segmentdetails[-1][\"segment_detail\"][one_shapeid].append( shared_shapes[one_shapeid] )\n\t\t\t\t\telse:\n\t\t\t\t\t\tif array_distance_segmentdetails[-1][\"segment_detail\"][one_shapeid][0] < shared_shapes[one_shapeid]:\n\t\t\t\t\t\t\tarray_distance_segmentdetails[-1][\"segment_detail\"][one_shapeid][1] =shared_shapes[one_shapeid]\n\t\t\tarray_tmp_shape_id = array_new_shape_id\n\tarray_segments_to_insert = []\n\n\tmap_to_append = {}\n\tprocessed_array_segmentdetails = []\n\tfor one_segment_detail in array_distance_segmentdetails:\n\t\tsegment_detail = one_segment_detail['segment_detail']\n\t\t\n\t\tif len(segment_detail[shape_id]) == 2:\n\t\t\tsegment_distance = (segment_detail[shape_id][1]-segment_detail[shape_id][0])\n\t\t\tif segment_distance >= DISTANCE_MIN:\n\t\t\t\t# process previous segment\n\t\t\t\tif map_to_append != {} and len(map_to_append[shape_id])==2:\n\t\t\t\t\tsegment_distance_previous = (map_to_append[shape_id][1]-map_to_append[shape_id][0])\t\t\t\t\t\n\t\t\t\t\tif segment_distance_previous > DISTANCE_MAX:\n\t\t\t\t\t\tdistance_per_step = segment_distance_previous/(int(segment_distance_previous/DISTANCE_MAX) + 1)\n\t\t\t\t\t\tfor step in range(int(segment_distance_previous/DISTANCE_MAX) + 1):\n\t\t\t\t\t\t\tnew_map_to_append = {}\n\t\t\t\t\t\t\tfor key, value in map_to_append.iteritems():\n\t\t\t\t\t\t\t\tnew_map_to_append[key] = [value[0]+step*distance_per_step, value[0]+(step+1)*distance_per_step]\n\t\t\t\t\t\t\tprocessed_array_segmentdetails.append(new_map_to_append)\n\t\t\t\t\telse:\n\t\t\t\t\t\tprocessed_array_segmentdetails.append(map_to_append)\n\n\t\t\t\t# process current segment\n\t\t\t\tif segment_distance >= DISTANCE_MAX:\n\t\t\t\t\tdistance_per_step = segment_distance/(int(segment_distance/DISTANCE_MAX) + 1)\n\t\t\t\t\tfor step in range(int(segment_distance/DISTANCE_MAX) + 1):\n\t\t\t\t\t\tnew_map_to_append = {}\n\t\t\t\t\t\tfor key, value in segment_detail.iteritems():\n\t\t\t\t\t\t\tnew_map_to_append[key] = [value[0]+step*distance_per_step, value[0]+(step+1)*distance_per_step]\n\t\t\t\t\t\tprocessed_array_segmentdetails.append(new_map_to_append)\n\t\t\t\telse:\n\t\t\t\t\tprocessed_array_segmentdetails.append(segment_detail)\n\t\t\t\tmap_to_append = {}\n\t\t\t\tcontinue\n\t\tif map_to_append == {}:\n\t\t\tmap_to_append = segment_detail\n\t\telse:\n\t\t\tnew_map_to_append = {}\n\t\t\tkeys = list(set(map_to_append.keys()).intersection(segment_detail.keys()))\n\t\t\tfor key in keys:\n\t\t\t\tnew_map_to_append[key] = [map_to_append[key][0], segment_detail[key][-1] ]\n\t\t\tmap_to_append = new_map_to_append\n\t# process 
last segment\n\tif map_to_append != {} and len(map_to_append[shape_id])==2:\n\t\tsegment_distance_previous = (map_to_append[shape_id][1]-map_to_append[shape_id][0])\n\t\tif segment_distance_previous > DISTANCE_MAX:\n\t\t\tdistance_per_step = segment_distance_previous/(int(segment_distance_previous/DISTANCE_MAX) + 1)\n\t\t\tfor step in range(int(segment_distance_previous/DISTANCE_MAX) + 1):\n\t\t\t\tnew_map_to_append = {}\n\t\t\t\tfor key, value in map_to_append.iteritems():\n\t\t\t\t\tnew_map_to_append[key] = [value[0]+step*distance_per_step, value[0]+(step+1)*distance_per_step]\n\t\t\t\tprocessed_array_segmentdetails.append(new_map_to_append)\n\t\t\tmap_to_append = {}\n\t\telse:\n\t\t\tprocessed_array_segmentdetails.append(map_to_append)\n\n\tfor one_segment_detail in processed_array_segmentdetails:\n\t\tsegment_detail = one_segment_detail\n\t\ttpResult = segments_shared_segments.find_one({\"shapes\":segment_detail})\n\t\tsegment_id = None\n\t\tif tpResult:\n\t\t\tsegment_id = tpResult['segment_id']\n\t\telse:\n\t\t\tsegment_id = str(uuid.uuid4())\n\t\t\tsegments_shared_segments.insert({\"shapes\":segment_detail, \"segment_id\": segment_id})\n\t\tif segment_id:\n\t\t\tarray_segments_to_insert.append({\"distance\": segment_detail[shape_id], \"segment_id\": segment_id, 'details': segment_detail.keys()})\n\t# segments_shapes.insert({\"shape_id\":shape_id, \"segments\":array_segments_to_insert})\n\treturn {\"shape_id\":shape_id, \"segments\":array_segments_to_insert}\n\n# calculate_segment_for_shapeid('9971')\n\ndef calculate_segments_for_all_shapeids():\n\tarray_shapeids = get_shapeids()\n\tarray_result = []\n\tfor shape_id in array_shapeids:\n\t\t# shape_id = \"9971\"\n\t\tprint \"calculating shape_id:\", array_shapeids.index(shape_id),\"/\",len(array_shapeids)\n\t\tarray_result.append(calculate_segment_for_shapeid(shape_id))\n\tsegments_shapes.insert_many(array_result, ordered=False)\n\ndef copy_shared_coordinates_from_local():\n\ttbResults = col_test.find()\n\tiii=0\n\tarray_result = []\n\tcount = tbResults.count()\n\tfor tpResult in tbResults:\n\t\tiii+=1\n\t\tif iii%200==0:\n\t\t\tprint \"->\", iii, count\n\t\ttpResult.pop(\"_id\", None)\n\t\tarray_result.append(tpResult)\n\tsegments_shared_coordinates.insert_many(array_result, ordered=False)\n\n# calculate_segments_for_all_shapeids()\n\ndef copy_shared_shapes_from_local():\n\tMONGODB_HOST_LOCAL = 'localhost'\n\tMONGODB_PORT = 27017\n\n\tconnection = MongoClient(MONGODB_HOST_LOCAL, MONGODB_PORT)\n\tcol_test = connection[\"thub_segments_nashville-mta22020160309\"]['shared_segments']\n\n\tconnection = MongoClient(\"mongodb://writing:[email protected]:27017\")\n\tsegments_shared_coordinates = connection[\"thub_segments_\"+DB_GTFS][\"shared_coordinates\"]\n\tsegments_shared_segments = connection[\"thub_segments_\"+DB_GTFS][\"shared_segments\"]\n\tsegments_shapes = connection[\"thub_segments_\"+DB_GTFS][\"shapes\"]\n\ttbResults = col_test.find()\n\tiii=0\n\tarray_result = []\n\tcount = tbResults.count()\n\tfor tpResult in tbResults:\n\t\tiii+=1\n\t\tif iii%200==0:\n\t\t\tprint \"->\", iii, count\n\t\ttpResult.pop(\"_id\", None)\n\t\tarray_result.append(tpResult)\n\tsegments_shared_segments.insert_many(array_result, ordered=False)\ncopy_shared_shapes_from_local()\n\nprint cal_distance([36.1660,-86.8070], [36.1660,-86.8071])" } ]
9
iequivocality/seidownpy
https://github.com/iequivocality/seidownpy
3065b16b03a3e65f5f38d0bcb4a3715de6fd2738
e56c83fefb3d7d353a204b83ad79fc74a3888c32
c7fd078d9aa92178fa65a1ea62c444cff27ae12e
refs/heads/master
2021-09-26T12:17:15.711391
2018-10-30T03:37:59
2018-10-30T03:37:59
77,491,699
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.6554622054100037, "alphanum_fraction": 0.658823549747467, "avg_line_length": 32.05555725097656, "blob_id": "8b85d0250672f3082fd60a44ca76dc56e709fc4f", "content_id": "8f3f2652b4a809dffc50ac935f394fb5f59e8113", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 595, "license_type": "no_license", "max_line_length": 65, "num_lines": 18, "path": "/seidownpy/pipelines.py", "repo_name": "iequivocality/seidownpy", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Define your item pipelines here\n#\n# Don't forget to add your pipeline to the ITEM_PIPELINES setting\n# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html\nimport scrapy\nfrom scrapy.pipelines.images import ImagesPipeline\n\nclass SeidownImagePipeline(ImagesPipeline):\n def get_media_requests(self, item, info):\n for file_url in item['image_urls']:\n \tif file_url is None:\n \t\tcontinue\n \tnew_file_url = file_url\n \tif \"?\" in file_url:\n \t\tnew_file_url = file_url.split(\"?\")[0]\n \tyield scrapy.Request(new_file_url)\n" }, { "alpha_fraction": 0.6320434212684631, "alphanum_fraction": 0.6469789743423462, "avg_line_length": 36.79487228393555, "blob_id": "eaff34e53c266cb717dcea45aac5d35997fed7df", "content_id": "c8b42c9698551d8cb08da86d72c383439db588f0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1473, "license_type": "no_license", "max_line_length": 124, "num_lines": 39, "path": "/seidownpy/spiders/KeyakiSpider.py", "repo_name": "iequivocality/seidownpy", "src_encoding": "UTF-8", "text": "from seidownpy.items import KeyakiItem\n\nimport scrapy\n\nclass KeyakiSpider(scrapy.Spider):\n URL_SYNTAX = 'http://www.keyakizaka46.com/s/k46o/diary/member/list?ima=0000&page=%s&cd=member&ct=%s'\n name = 'keyaki'\n\n def __init__(self, name='', first=\"0\", last=\"1\", *args, **kwargs):\n\t\tsuper(KeyakiSpider, self).__init__(*args, **kwargs)\n\t\tself.start_urls = ['http://www.keyakizaka46.com/s/k46o/diary/member/list?ima=0000&ct=%s' % name]\n\t\tself.main_name = name\n\t\tself.first_int = self._get_int(first)\n\t\tself.last_int = self._get_int(last)\n\n def _get_int(self, int_str):\n\t\tif not int_str.isdigit() or int(int_str) < 0:\n\t\t\traise ValueError(\"Page number must be a positive digit\")\n\t\treturn int(int_str)\n\n def start_requests(self):\n first_int = self.first_int\n last_int = self.last_int\n step = self._get_step(first_int, last_int)\n for page_number in range(first_int, last_int + 1, step):\n yield scrapy.Request(self.URL_SYNTAX % (page_number, self.main_name), callback=self.parse)\n\n def _get_step(self, first_int, last_int):\n\t\tif first_int >= last_int:\n\t\t\treturn -1\n\t\telse:\n\t\t\treturn 1\n\n def parse(self, response):\n box_main = response.css(\"div.keyaki-blog_list div.l-wrapper div.l-content div.l-inner div.box-content div.box-main\")\n main_contents = box_main.css('img')\n for content in main_contents:\n url = content.xpath(\"@src\").extract_first()\n yield KeyakiItem(image_urls=[url])" }, { "alpha_fraction": 0.7091121673583984, "alphanum_fraction": 0.7102803587913513, "avg_line_length": 21.526315689086914, "blob_id": "ab391b030ce20c75ab611e0a5147bd080973c4c0", "content_id": "b263f7a98bb9a5a0807359333342c5ec932a1e16", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 856, "license_type": "no_license", "max_line_length": 51, "num_lines": 38, "path": "/seidownpy/items.py", "repo_name": 
"iequivocality/seidownpy", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Define here the models for your scraped items\n#\n# See documentation in:\n# http://doc.scrapy.org/en/latest/topics/items.html\n\nimport scrapy\n\nclass AmebloItem(scrapy.Item):\n\titem_id = scrapy.Field()\n\timage_urls = scrapy.Field()\n\timages = scrapy.Field()\n\nclass KeyakiItem(scrapy.Item):\n\titem_id = scrapy.Field()\n\timage_urls = scrapy.Field()\n\timages = scrapy.Field()\n\nclass SingleItem(scrapy.Item):\n\timage_urls = scrapy.Field()\n\timages = scrapy.Field()\n\nclass LineBlogItem(scrapy.Item):\n\titem_source = scrapy.Field();\n\timage_urls = scrapy.Field()\n\timages = scrapy.Field()\n\nclass TumblrItem(scrapy.Item):\n\titem_id = scrapy.Field()\n\titem_source = scrapy.Field()\n\timage_urls = scrapy.Field()\n\timages = scrapy.Field()\n\nclass InstagramItem(scrapy.Item):\n\titem_source = scrapy.Field()\n\timage_urls = scrapy.Field()\n\timages = scrapy.Field() " }, { "alpha_fraction": 0.6733668446540833, "alphanum_fraction": 0.67902010679245, "avg_line_length": 29.596153259277344, "blob_id": "62eca5498fab9c020fae35b13c2852d9ee240370", "content_id": "0f57f8f6b9732b2aafce3a39d525e62ef60a8c5b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1592, "license_type": "no_license", "max_line_length": 72, "num_lines": 52, "path": "/seidownpy/spiders/LineBlogSpider.py", "repo_name": "iequivocality/seidownpy", "src_encoding": "UTF-8", "text": "from seidownpy.items import LineBlogItem\n\nimport datetime\nimport scrapy\nimport os\n\nclass LineBlogSpider(scrapy.Spider):\n\tURL_SYNTAX = 'http://lineblog.me/%s/?p=%s'\n\tname = \"lineblog\"\n\n\tdef __init__(self, name='', first='0', last='1', *args, **kwargs):\n\t\tif not first.isdigit() or not last.isdigit():\n\t\t\traise ValueError(\"Page number must be a positive digit\")\n\n\t\tsuper(LineBlogSpider, self).__init__(*args, **kwargs)\n\t\tself.start_urls = ['http://lineblog.me/%s' % name]\n\t\tself.main_name = name\n\t\tself.page_urls = self._create_urls(first, last)\n\n\tdef _create_urls(self, first=0, last=1):\n\t\tif not first.isdigit() or not last.isdigit():\n\t\t\traise ValueError(\"Page number must be a positive digit\")\n\n\t\tfirst_int = int(first)\n\t\tlast_int = int(last)\n\n\t\tif first_int < 0 or last_int < 0:\n\t\t\traise ValueError(\"Page number must be a positive digit\")\n\n\t\turls = []\n\t\tstep = self._get_step(first_int, last_int)\n\t\turls.append('http://lineblog.me/%s' % self.main_name)\n\t\tfor page_number in range(first_int, last_int + 1, step):\n\t\t\turls.append(self.URL_SYNTAX % (self.main_name, page_number))\n\t\treturn urls\n\n\tdef _get_step(self, first_int, last_int):\n\t\tif first_int >= last_int:\n\t\t\treturn -1\n\t\telse:\n\t\t\treturn 1\n\n\tdef start_requests(self):\n\t\tfor url in self.page_urls:\n\t\t\tyield scrapy.Request(url, callback=self.parse)\n\n\tdef parse(self, response):\n\t\tarticles = response.css(\"div#main-inner article.article\")\n\t\tfor article in articles:\n\t\t\tfor url in article.css(\"img.pict\").xpath(\"@src\").extract():\n\t\t\t\ttempURL = url[:url.rfind('/')]\n\t\t\t\tyield LineBlogItem(item_source=self.main_name, image_urls=[tempURL])\n\n" }, { "alpha_fraction": 0.6612510085105896, "alphanum_fraction": 0.6718115210533142, "avg_line_length": 37.5, "blob_id": "3cb8a00922114ef4c9399976221677190051ccbf", "content_id": "cca8eadf527f39491a5eff13753e94230ba1cd26", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 1231, "license_type": "no_license", "max_line_length": 95, "num_lines": 32, "path": "/seidownpy/spiders/InstagramSpider.py", "repo_name": "iequivocality/seidownpy", "src_encoding": "UTF-8", "text": "from seidownpy.items import InstagramItem\nimport scrapy\n\nclass InstagramSpider(scrapy.Spider):\n\tURL_SYNTAX = 'https://www.instagram.com%s'\n\tname = \"instagram\"\n\n\tdef __init__(self, name='', *args, **kwargs):\n\t\tself.main_name = '/%s' % (name)\n\t\tsuper(InstagramSpider, self).__init__(*args, **kwargs)\n\n\tdef start_requests(self):\n\t\tyield scrapy.Request(self.URL_SYNTAX % (self.main_name), callback=self.parse)\n\n\tdef parse(self, response):\n\t\tmain = response.css(\"#react-root\").css(\"main._6ltyr\")\n\t\tarticle = main.css(\"article._42elc\")\n\t\tfor _myci9 in article.css(\"._nljxa\").css(\"._myci9\"):\n\t\t\tcol = _myci9.css(\"._8mlbc\").xpath(\"@href\").extract()\n\t\t\tfor item in col:\n\t\t\t\tyield scrapy.Request(url=self.URL_SYNTAX % (item), callback=self.parse_item)\n\n\t\tnext_page = article.css(\"._8imhp\").xpath(\"@href\")\n\t\tif len(next_page) > 0:\n\t\t\tyield scrapy.Request(url=self.URL_SYNTAX % (next_page.extract_first()), callback=self.parse)\n\n\tdef parse_item(self, response):\n\t\tmain = response.css(\"#react-root\").css(\"main._6ltyr\")\n\t\tarticle = main.css(\"article._ksjel > div\")\n\t\tfor _jjzlb in article.css(\"._22yr2\").css(\"._jjzlb\"):\n\t\t\tcol = _jjzlb.css(\"img\").xpath(\"@src\").extract_first()\n\t\t\tyield InstagramItem(item_source=self.main_name, image_urls=[col])" }, { "alpha_fraction": 0.6623024940490723, "alphanum_fraction": 0.6668171286582947, "avg_line_length": 37.18965530395508, "blob_id": "7ae3cf63233b6e542e28a72f9f4b34bdc16f9eb4", "content_id": "9c824ff3374ab0efe1782cdf812250e6f097d985", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2215, "license_type": "no_license", "max_line_length": 134, "num_lines": 58, "path": "/seidownpy/spiders/AmebloSpider.py", "repo_name": "iequivocality/seidownpy", "src_encoding": "UTF-8", "text": "from seidownpy.items import AmebloItem\n\nimport datetime\nimport scrapy\nimport os\n\nclass AmebloSpider(scrapy.Spider):\n\tENTRY_URL_SYNTAX = 'http://ameblo.jp/%s/entry-%s.html' \n\tPAGE_URL_SYNTAX = 'http://ameblo.jp/%s/page-%s.html'\n\tname = \"ameblo\"\n\tstart_urls = [\"http://ameblo.jp/\"]\n\n\tdef __init__(self, name='', entry=None, first=\"0\", last=\"1\", *args, **kwargs):\n\t\tsuper(AmebloSpider, self).__init__(*args, **kwargs)\n\t\tself.start_urls = ['http://ameblo.jp/%s/' % name]\n\t\tself.main_name = name\n\t\tself.entry_no = entry\n\t\tself.first_int = self._get_int(first)\n\t\tself.last_int = self._get_int(last)\n\n\tdef _get_int(self, int_str):\n\t\tif not int_str.isdigit() or int(int_str) < 0:\n\t\t\traise ValueError(\"Page number must be a positive digit\")\n\t\treturn int(int_str)\n\n\tdef start_requests(self):\n\t\tif self.entry_no is None:\n\t\t\tfirst_int = self.first_int\n\t\t\tlast_int = self.last_int\n\t\t\tstep = self._get_step(first_int, last_int)\n\t\t\tfor page_number in range(first_int, last_int + 1, step):\n\t\t\t\tyield scrapy.Request(self.PAGE_URL_SYNTAX % (self.main_name, page_number), callback=self.parse)\n\t\telse:\n\t\t\tyield scrapy.Request(self.ENTRY_URL_SYNTAX % (self.main_name, self.entry_no), callback=self.parse)\n\n\tdef _get_step(self, first_int, last_int):\n\t\tif first_int >= last_int:\n\t\t\treturn -1\n\t\telse:\n\t\t\treturn 1\n\n\tdef parse(self, response):\n\t\tmain = 
response.css(\"div#main\")\n\t\ttile = main.xpath(\"//div[@amb-component='tiles']\")\n\t\tif (len(tile.extract()) > 0):\n\t\t\ttileList = tile.xpath(\"//ul[@amb-component='tileList']\")\n\t\t\titemBody = tileList.xpath(\"//div[@amb-component='tileItem']\").xpath(\"//div[@amb-component='tileItemBody']\")\n\t\t\titemTitleLink = itemBody.xpath(\"//h2[@amb-component='tileItemTitle']\").xpath(\"//a[@class='skin-titleLink']\")\n\t\t\tfor link in itemTitleLink:\n\t\t\t\tyield scrapy.Request(link.xpath(\"@href\").extract_first(), callback=self.parse)\n\t\t\treturn\n\n\t\tdefault = main.xpath(\"//article[@data-unique-ameba-id='%s'] | //div[@data-unique-ameba-id='%s']\" % (self.main_name, self.main_name))\n\t\tif (len(default.extract()) > 0):\n\t\t\tfor u in default.xpath(\"//a/img\"):\n\t\t\t\timageURL = u.xpath(\"@src\").extract_first()\n\t\t\t\timageID = os.path.basename(imageURL).split(\"?\")[0]\n\t\t\t\tyield AmebloItem(item_id=imageID, image_urls=[imageURL])\n" }, { "alpha_fraction": 0.7076349854469299, "alphanum_fraction": 0.7109869718551636, "avg_line_length": 34.813331604003906, "blob_id": "88df0884b041d2a3b226f34ab08f0cf387fef7c0", "content_id": "4e7979f11f2848fbacbd472c6b60c7f0cec34324", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2685, "license_type": "no_license", "max_line_length": 92, "num_lines": 75, "path": "/seidownpy/spiders/TumblrSpider.py", "repo_name": "iequivocality/seidownpy", "src_encoding": "UTF-8", "text": "from seidownpy.items import TumblrItem\n\nimport scrapy\n\nclass TumblrSpider(scrapy.Spider):\n\tFIRSTPAGE_URL_SYNTAX = 'http://%s.tumblr.com'\n\tOTHERPAGE_URL_SYNTAX = 'http://%s.tumblr.com/page/%s'\n\tname = \"tumblr\"\n\n\tdef __init__(self, name='', first='1', last='1', *args, **kwargs):\n\t\tif not first.isdigit() or not last.isdigit():\n\t\t\traise ValueError(\"Page number must be a positive digit\")\n\n\t\tsuper(TumblrSpider, self).__init__(*args, **kwargs)\n\t\tself.main_name = name\n\t\tself.start_urls = [self.FIRSTPAGE_URL_SYNTAX % name]\n\t\tself.page_urls = self._create_urls(first, last)\n\n\tdef _create_urls(self, first=0, last=1):\n\t\tif not first.isdigit() or not last.isdigit():\n\t\t\traise ValueError(\"Page number must be a positive digit\")\n\n\t\tfirst_int = int(first)\n\t\tlast_int = int(last)\n\n\t\tif first_int < 0 or last_int < 0:\n\t\t\traise ValueError(\"Page number must be a positive digit\")\n\n\t\turls = []\n\t\tstep = self._get_step(first_int, last_int)\n\t\tfor page_number in range(first_int, last_int + 1, step):\n\t\t\turls.append(self.OTHERPAGE_URL_SYNTAX % (self.main_name, page_number))\n\t\treturn urls\n\n\tdef _get_step(self, first_int, last_int):\n\t\tif first_int >= last_int:\n\t\t\treturn -1\n\t\telse:\n\t\t\treturn 1\n\n\tdef start_requests(self):\n\t\tfor url in self.page_urls:\n\t\t\tyield scrapy.Request(url, callback=self.parse)\n\n\tdef parse(self, response):\n\t\tfor photo_type_item in self._parse_photo_type(response):\n\t\t\tyield photo_type_item\n\n\t\tfor photoset_type_item in self._parse_photoset_type(response):\n\t\t\tyield photoset_type_item\n\n\tdef _parse_photo_type(self, response):\n\t\tpost_contents = response.css(\"div.post-type-photo div.post-content\")\n\t\tfor post_content in post_contents:\n\t\t\thighres = post_content.css(\"a.high-res\").xpath(\"@href\").extract_first()\n\t\t\tyield TumblrItem(item_id='', item_source=self.main_name, image_urls=[highres])\n\n\tdef _parse_photoset_type(self, response):\n\t\tpost_contents = 
response.css(\"div.post-type-photoset div.post-content\")\n\t\tfor post_content in post_contents:\n\t\t\tiframe = post_content.css(\"iframe.photoset\")\n\t\t\turl = iframe.xpath(\"@src\").extract_first()\n\t\t\tyield scrapy.Request(url, callback=self.parse_photo_from_photoset)\n\n\tdef parse_photo_from_photoset(self, response):\n\t\tphotoset_rows = response.css(\"div.photoset div.photoset_row\")\n\t\tfor photoset_row in photoset_rows:\n\t\t\tphotoset_photos = photoset_row.css(\"a.photoset_photo\")\n\t\t\tprint photoset_photos\n\t\t\tyield self._extract_items_from_photoset_photos(photoset_photos)\n\n\tdef _extract_items_from_photoset_photos(self, photoset_photos):\n\t\tfor photoset_photo in photoset_photos:\n\t\t\tphotoset_photo_url = photoset_photo.xpath(\"@href\").extract_first()\n\t\t\tyield TumblrItem(item_id='', item_source=self.main_name, image_urls=[photoset_photo_url])" }, { "alpha_fraction": 0.6772615909576416, "alphanum_fraction": 0.6772615909576416, "avg_line_length": 24.5, "blob_id": "afc73493fa4b1c2d5570ad8a558b1edf7238732a", "content_id": "10c4b0d49156cd216165b6c8161f0488ad20103c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 409, "license_type": "no_license", "max_line_length": 53, "num_lines": 16, "path": "/seidownpy/spiders/SingleSpider.py", "repo_name": "iequivocality/seidownpy", "src_encoding": "UTF-8", "text": "from seidownpy.items import SingleItem\n\nimport scrapy\n\nclass SingleSpider(scrapy.Spider):\n\tname = \"single\"\n\n\tdef __init__(self, link='', *args, **kwargs):\n\t\tsuper(SingleSpider, self).__init__(*args, **kwargs)\n\t\tself.start_urls = [link]\n\n\tdef parse(self, response):\n\t\timgs = response.xpath(\"//img\")\n\t\tfor img in imgs:\n\t\t\timageURL = img.xpath(\"@src\").extract_first()\n\t\t\tyield SingleItem(image_urls=[imageURL])\n\n" }, { "alpha_fraction": 0.6912071704864502, "alphanum_fraction": 0.6950819492340088, "avg_line_length": 35.46739196777344, "blob_id": "060a3136b57b7de67704cb081247220ab4eb5988", "content_id": "aad83d9d6ee2e4e6aabdc21dabef45caec1ebe7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3355, "license_type": "no_license", "max_line_length": 134, "num_lines": 92, "path": "/README.md", "repo_name": "iequivocality/seidownpy", "src_encoding": "UTF-8", "text": "SeidownPy v0.5\n=============\nAn application that will download images from any link or any of the supported websites.\nBuilt-on top of Scrapy, a Python based web scrapping framework.\n\n**Support**\n* [ameblo](http://ameblo.jp)\n* [tumblr](https://tumblr.com)\n* [lineblog](http://lineblog.me/)\n* [single] - any link\n* [Other links]\n\n**Future Support**\n* Trivial face-detection\n\n**Requirements**\n* scrapy\n* image\n\n*Both can installed via pip*\n\n**Installation**\n**Mac OS X**\n1. Open Terminal\n2. Run the following command: python\n3. Install virtualenv through pip (pip install virtualenv). 
This is in order to prevent affecting the built-in Python executable.\n\n**Customization**\n* Changing file store for scrapy.\n - Go to *settings.py*\n - Change the FILES_STORE at the bottom of the file to the desired value.\n* Reference for custom settings can be found here.\n - https://doc.scrapy.org/en/latest/topics/settings.html#project-settings-module\n\n**Ameblo**\n**Per entry**\nscrapy crawl **ameblo** -a name=*name* -a entry=*entrynumber*\n* ameblo (*required*) - images will be fetched from http://ameblo.jp\n* name (*required*) - name of the blog where images will be fetched from\n* entrynumber (*required*) - entry number which can be extracted from links\n*Example:* http://ameblo.jp/someblog/entry-*entrynumber*.html\n\n./ameblo.sh **name** **entrynumber**\n\n**Bulk**\n* scrapy crawl **ameblo** -a **first**=*first page* -a **last**=*last_page* -a **name**=*blog* -o *output file*\n\n* ameblo (*required*) - images will be fetched from http://ameblo.jp\n* name (*required*) - name of the blog where images will be fetched from\n* first (*optional*) - first page where images are fetched\n* last (*optional*) - last page where images are fetched\n* o (*optional*) - output file where logs are kept\n\n./ameblo.sh **name** **first** **last**\n\n**Tumblr**\n* scrapy crawl **tumblr** -a **first**=*first page* -a **last**=*last_page* -a **name**=*blog* -o *output file*\n\n* ameblo (*required*) - images will be fetched from http://ameblo.jp\n* name (*required*) - name of the blog where images will be fetched from\n* first (*optional*) - first page where images are fetched\n* last (*optional*) - last page where images are fetched\n* o (*optional*) - output file where logs are kept\n\n./tumblr.sh **name** **first** **last**\n\n**Lineblog**\n* scrapy crawl **lineblog** -a **first**=*first page* -a **last**=*last_page* -a **name**=*blog* -o *output file*\n\n* ameblo (*required*) - images will be fetched from http://ameblo.jp\n* name (*required*) - name of the blog where images will be fetched from\n* first (*optional*) - first page where images are fetched\n* last (*optional*) - last page where images are fetched\n* o (*optional*) - output file where logs are kept\n\n./lineblog.sh **name** **first** **last**\n\n**Other Links**\n* scrapy crawl **single** -a **link**=*url*\n\n**Shortcuts**\nScripts are added under the script folder which are basically shortcuts to the syntax, without the need for typing arguments yourself.\n\n**Reference**\n-------------\n* [Scraping images with Python and Scrapy] (http://www.pyimagesearch.com/2015/10/12/scraping-images-with-python-and-scrapy/)\n* [Spiders] (https://doc.scrapy.org/en/latest/topics/spiders.html)\n* [Downloading and processing files and images] (https://doc.scrapy.org/en/latest/topics/media-pipeline.html)\n\n**Version History**\n\nTo be added.\n" } ]
9
walterhuangfs/xgboost-trainer
https://github.com/walterhuangfs/xgboost-trainer
9cc21b604fc6d9e6d12bdef2572899566e7bfcfb
ea6a0afea6e2023df982387e38ab114f860a4ee6
9b43d75dde3ae1152d141835a8447082826afbf4
refs/heads/master
2021-05-05T23:48:59.114057
2018-01-09T04:16:58
2018-01-09T04:16:58
116,764,828
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7631579041481018, "alphanum_fraction": 0.7631579041481018, "avg_line_length": 27.5, "blob_id": "69f00e5ea6a5981a59b9e12b1f8a03dca9b37276", "content_id": "1ef3378703ed2274eff7e3b8c6def7ebcbb93d10", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 228, "license_type": "no_license", "max_line_length": 50, "num_lines": 8, "path": "/webapp/models.py", "repo_name": "walterhuangfs/xgboost-trainer", "src_encoding": "UTF-8", "text": "from django.db import models\n\n# Create your models here.\nclass XgboostModel(models.Model):\n created = models.DateTimeField(\"date created\")\n\nclass TrainingData(models.Model):\n created = models.DateTimeField(\"date created\")\n" } ]
1
Samir529/Billboard-Advertisement-System
https://github.com/Samir529/Billboard-Advertisement-System
3119ff4103b886de06d0990d1e5c04173ad9cbad
6dfeaf5abdb8c8f0a9d3f9ff30fbfa19317ccc75
43110c02bed2a504d508f8f230d406b9ed96e52e
refs/heads/main
2023-08-21T23:13:39.422964
2021-10-01T17:18:57
2021-10-01T17:18:57
382,419,147
1
0
null
2021-07-02T17:33:43
2021-07-21T16:08:59
2021-09-11T10:13:32
null
[ { "alpha_fraction": 0.5671471953392029, "alphanum_fraction": 0.5697395205497742, "avg_line_length": 39.204833984375, "blob_id": "617a697a185789b8bc5f32a989a770d694a770ab", "content_id": "9a9c7ee80981fce1d49fc5301a1b3804f06e8b5b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 31632, "license_type": "no_license", "max_line_length": 171, "num_lines": 786, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/views.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "import copy\nimport random\n\nfrom django.contrib import messages\nfrom django.contrib.auth.models import User\nfrom django.db.models import Count\nfrom django.utils import timezone\n\nfrom django.contrib.auth import authenticate, login, update_session_auth_hash\nfrom django.contrib.auth import logout\nfrom django.http import HttpResponseRedirect, HttpResponse\nfrom django.urls import reverse\nfrom django.contrib.auth.decorators import login_required\n\nfrom django.shortcuts import render, redirect\nimport datetime\n\nfrom .filter import billboardFilter, billboardFilter2\nfrom .forms import UserForm, customerProfilePicForm, advertiserProfilePicForm, cityCorporationProfilePicForm, post_form, \\\n confirm_post_form, changePassForm, billboardPicForm\nfrom .models import CustomerProfileInfo, CityCorporationProfileInfo, AdvertiserProfileInfo, confirm_post, PostAdvertiseTable, CurrentPriceUpdate\n\n\ndef home(req):\n allPosts = PostAdvertiseTable.objects.all().order_by('-post_date')\n allConfirmedposts = confirm_post.objects.all()\n\n billboard_filter = billboardFilter2(req.GET, queryset=allPosts)\n context = {'allPosts': allPosts, 'allConfirmedposts': allConfirmedposts, 'filter': billboard_filter}\n return render(req, 'home.html', context)\n\ndef base(req):\n return render(req, 'base.html')\n\ndef about(request):\n return render(request, 'about.html')\n\ndef aboutUs(request):\n return render(request, 'about_us.html')\n\n@login_required\ndef staffPanel(request):\n return render(request, 'staffPanel.html')\n\n@login_required\ndef customerPanel(request):\n return render(request, 'Customer_panel.html')\n\n@login_required\ndef advertiserPanel(request):\n return render(request, 'Advertiser_panel.html')\n\n@login_required\ndef cityCorporationPanel(request):\n return render(request, 'cityCorporation_panel.html')\n\ndef sign_in_options(request):\n if request.method == 'POST':\n if 'Customer' in request.POST:\n return HttpResponseRedirect(reverse('register_customer'))\n elif 'Advertiser' in request.POST:\n return HttpResponseRedirect(reverse('register_advertiser'))\n # elif 'City_Corporation' in request.POST:\n # return HttpResponseRedirect(reverse('register_cityCorporation'))\n return render(request, 'sign_in_options.html')\n\n\ndef staff_login(request):\n isStaff = 'a'\n if request.method == 'POST':\n username = request.POST.get('username')\n password = request.POST.get('password')\n remember_me = request.POST.get('remember')\n\n user = authenticate(username=username, password=password)\n\n if user:\n if user.is_staff:\n login(request, user)\n if not remember_me:\n request.session.set_expiry(0)\n return HttpResponseRedirect(reverse('staffPanel'))\n else:\n isStaff = 'not_staff'\n return render(request, 'staff_login.html', {'isStaff': isStaff})\n else:\n isStaff = 'not_user'\n return render(request, 'staff_login.html', {'isStaff': isStaff})\n return render(request, 'staff_login.html', {'isStaff': 
isStaff})\n\n\ndef user_login(request):\n isuser = 'a'\n if request.method == 'POST':\n username = request.POST.get('username')\n password = request.POST.get('password')\n remember_me = request.POST.get('remember')\n\n user = authenticate(username=username, password=password)\n\n if user:\n if user.is_staff == False:\n if user.is_active:\n login(request, user)\n if not remember_me: # unchecked\n request.session.set_expiry(0) # if exits from browser then login will lost,\n # else, if exits from browser then login will not lost\n try:\n c = CustomerProfileInfo.objects.get(user=request.user)\n if c.is_customer==True:\n return HttpResponseRedirect(reverse('customerPanel'))\n except CustomerProfileInfo.DoesNotExist:\n try:\n a = AdvertiserProfileInfo.objects.get(user=request.user)\n if a.is_advertiser == True:\n return HttpResponseRedirect(reverse('advertiserPanel'))\n except AdvertiserProfileInfo.DoesNotExist:\n try:\n ct = CityCorporationProfileInfo.objects.get(user=request.user)\n if ct.is_cityCor == True:\n return HttpResponseRedirect(reverse('cityCorporationPanel'))\n except CityCorporationProfileInfo.DoesNotExist:\n return HttpResponse(\"Account is Not Active.\")\n else:\n return HttpResponse(\"Account is Not Active.\")\n elif user.is_staff == True:\n if user.is_active:\n isuser = 'staff_user'\n login(request, user)\n if not remember_me:\n request.session.set_expiry(0)\n return HttpResponseRedirect(reverse('staffPanel'))\n else:\n return HttpResponse(\"Account is Not Active.\")\n else:\n isuser = 'not_user'\n\n return render(request, 'user_login.html', {'isuser': isuser})\n\n\n@login_required\ndef user_logout(request):\n logout(request)\n return HttpResponseRedirect(reverse('home'))\n\n\ndef register_customer(request):\n registered = False\n match_password = 1\n\n if request.method == 'POST':\n\n mobileNo = request.POST.get('mobileNo')\n location = request.POST.get('location')\n\n user_form = UserForm(data=request.POST)\n profile_picture_form = customerProfilePicForm(request.POST, request.FILES)\n if user_form.is_valid() and profile_picture_form.is_valid() and user_form.cleaned_data['password'] == user_form.cleaned_data['confirm_password']:\n user = user_form.save()\n user.set_password(user.password)\n user.save()\n\n t = CustomerProfileInfo()\n # t2 = User()\n t.mobileNo = mobileNo\n t.location = location\n t.is_customer = True\n t.user = user\n t.profile_picture = profile_picture_form.cleaned_data['profile_picture']\n t.save()\n # t2.username = username\n # t2.password = password\n # t2.first_name = first_name\n # t2.last_name = last_name\n # t2.email = email\n # t2.save()\n\n registered = True\n\n elif user_form.data['password'] != user_form.data['confirm_password']:\n # print('password and confirm password does not match')\n match_password = 0\n # user_form.add_error('confirm_password', 'password and confirm password does not match')\n else:\n print(user_form.errors, profile_picture_form.errors)\n else:\n user_form = UserForm()\n profile_picture_form = customerProfilePicForm()\n return render(request, 'customer_registration.html',\n {'user_form': user_form, 'profile_picture_form': profile_picture_form, 'registered': registered, 'match_password': match_password})\n\n\ndef register_advertiser(request):\n registered = False\n match_password = 1\n\n if request.method == 'POST':\n\n mobileNo = request.POST.get('mobileNo')\n location = request.POST.get('location')\n\n user_form = UserForm(data=request.POST)\n profile_picture_form = advertiserProfilePicForm(request.POST, request.FILES)\n if 
user_form.is_valid() and profile_picture_form.is_valid() and user_form.cleaned_data['password'] == user_form.cleaned_data['confirm_password']:\n user = user_form.save()\n user.set_password(user.password)\n user.save()\n\n t = AdvertiserProfileInfo()\n t.mobileNo = mobileNo\n t.location = location\n t.is_advertiser = True\n t.user = user\n t.profile_picture = profile_picture_form.cleaned_data['profile_picture']\n t.save()\n\n registered = True\n\n elif user_form.data['password'] != user_form.data['confirm_password']:\n # print('password and confirm password does not match')\n match_password = 0\n # user_form.add_error('confirm_password', 'password and confirm password does not match')\n else:\n print(user_form.errors, profile_picture_form.errors)\n else:\n user_form = UserForm()\n profile_picture_form = advertiserProfilePicForm()\n return render(request, 'advertiser_registration.html',\n {'user_form': user_form, 'profile_picture_form': profile_picture_form, 'registered': registered, 'match_password': match_password})\n\n\ndef register_cityCorporation(request):\n registered = False\n match_password = 1\n\n if request.method == 'POST':\n\n mobileNo = request.POST.get('mobileNo')\n location = request.POST.get('location')\n\n user_form = UserForm(data=request.POST)\n profile_picture_form = cityCorporationProfilePicForm(request.POST, request.FILES)\n if user_form.is_valid() and profile_picture_form.is_valid() and user_form.cleaned_data['password'] == user_form.cleaned_data['confirm_password']:\n user = user_form.save()\n user.set_password(user.password)\n user.save()\n\n t = CityCorporationProfileInfo()\n t.mobileNo = mobileNo\n t.location = location\n t.is_cityCor = True\n t.user = user\n t.profile_picture = profile_picture_form.cleaned_data['profile_picture']\n t.save()\n\n registered = True\n\n elif user_form.data['password'] != user_form.data['confirm_password']:\n # print('password and confirm password does not match')\n match_password = 0\n # user_form.add_error('confirm_password', 'password and confirm password does not match')\n else:\n print(user_form.errors, profile_picture_form.errors)\n else:\n user_form = UserForm()\n profile_picture_form = cityCorporationProfilePicForm()\n return render(request, 'govt_registration.html',\n {'user_form': user_form, 'profile_picture_form': profile_picture_form, 'registered': registered, 'match_password': match_password})\n\n\n@login_required\ndef updateProfile(request):\n registered = 'no'\n updated = 'no'\n p=0\n\n if request.method == 'POST':\n first_name = request.POST.get('first_name')\n last_name = request.POST.get('last_name')\n email = request.POST.get('email')\n mobileNo = request.POST.get('mobileNo')\n location = request.POST.get('location')\n\n if request.user.is_authenticated:\n t = User.objects.get(username=request.user)\n try:\n t2 = CustomerProfileInfo.objects.get(user=request.user)\n if t2.is_customer == True:\n profile_picture_form = customerProfilePicForm(request.POST, request.FILES)\n if profile_picture_form.is_valid():\n profile_pic = profile_picture_form.cleaned_data['profile_picture']\n else:\n print(profile_picture_form.errors)\n if first_name != \"\":\n t.first_name = first_name\n if last_name != \"\":\n t.last_name = last_name\n if email != \"\":\n t.email = email\n if mobileNo != \"\":\n t2.mobileNo = mobileNo\n if location != \"\":\n t2.location = location\n if profile_pic != \"/profiles_pic/Customer_profile_pic/demo_profile_pic2.png\":\n t2.profile_picture = profile_pic\n p=1\n t.save()\n t2.save()\n\n except 
CustomerProfileInfo.DoesNotExist:\n try:\n t2 = AdvertiserProfileInfo.objects.get(user=request.user)\n if t2.is_advertiser == True:\n profile_picture_form = advertiserProfilePicForm(request.POST, request.FILES)\n if profile_picture_form.is_valid():\n profile_pic = profile_picture_form.cleaned_data['profile_picture']\n else:\n print(profile_picture_form.errors)\n if first_name != \"\":\n t.first_name = first_name\n if last_name != \"\":\n t.last_name = last_name\n if email != \"\":\n t.email = email\n if mobileNo != \"\":\n t2.mobileNo = mobileNo\n if location != \"\":\n t2.location = location\n if profile_pic != \"/profiles_pic/Advertiser_profile_pic/demo_profile_pic2.png\":\n t2.profile_picture = profile_pic\n p = 1\n t.save()\n t2.save()\n\n except AdvertiserProfileInfo.DoesNotExist:\n try:\n t2 = CityCorporationProfileInfo.objects.get(user=request.user)\n if t2.is_cityCor == True:\n profile_picture_form = cityCorporationProfilePicForm(request.POST, request.FILES)\n if profile_picture_form.is_valid():\n profile_pic = profile_picture_form.cleaned_data['profile_picture']\n else:\n print(profile_picture_form.errors)\n if first_name != \"\":\n t.first_name = first_name\n if last_name != \"\":\n t.last_name = last_name\n if email != \"\":\n t.email = email\n if mobileNo != \"\":\n t2.mobileNo = mobileNo\n if location != \"\":\n t2.location = location\n if profile_pic != \"/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png\":\n t2.profile_picture = profile_pic\n p = 1\n t.save()\n t2.save()\n\n except CityCorporationProfileInfo.DoesNotExist:\n return HttpResponse(\"Account is Not Actived.\")\n if first_name==\"\" and last_name==\"\" and email==\"\" and mobileNo==\"\" and location==\"\" and p==0:\n updated = 'all_are_null'\n else:\n updated = 'all_are_not_null'\n else:\n registered = 'not_registered'\n else:\n try:\n t2 = CustomerProfileInfo.objects.get(user=request.user)\n if t2.is_customer == True:\n profile_picture_form = customerProfilePicForm()\n\n except CustomerProfileInfo.DoesNotExist:\n try:\n t2 = AdvertiserProfileInfo.objects.get(user=request.user)\n if t2.is_advertiser == True:\n profile_picture_form = advertiserProfilePicForm()\n\n except AdvertiserProfileInfo.DoesNotExist:\n try:\n t2 = CityCorporationProfileInfo.objects.get(user=request.user)\n if t2.is_cityCor == True:\n profile_picture_form = cityCorporationProfilePicForm()\n\n except CityCorporationProfileInfo.DoesNotExist:\n return HttpResponse(\"Account is Not Active!\")\n return render(request, 'update_profile.html', {'profile_picture_form': profile_picture_form, 'registered': registered, 'updated': updated, 't2':t2})\n\n\n@login_required\ndef viewProfile(request):\n profile = 0\n if request.user.is_authenticated:\n user = User.objects.get(username=request.user)\n try:\n profile = CustomerProfileInfo.objects.get(user=request.user)\n if profile.is_customer == True:\n confirmed_posts = confirm_post.objects.filter(confirmed_by=user)\n confirmed_post_count = confirmed_posts.count()\n return render(request, 'view_profile.html', {'profile': profile, 'user': user, 'confirmed_post_count': confirmed_post_count})\n except CustomerProfileInfo.DoesNotExist:\n try:\n profile = AdvertiserProfileInfo.objects.get(user=request.user)\n if profile.is_advertiser == True:\n posts = PostAdvertiseTable.objects.filter(author=user)\n post_count = posts.count()\n confirmed_posts = confirm_post.objects.filter(advertiser=user)\n confirmed_post_count = confirmed_posts.count()\n return render(request, 'view_profile.html', {'profile': profile, 
'user': user, 'post_count': post_count, 'confirmed_post_count': confirmed_post_count})\n except AdvertiserProfileInfo.DoesNotExist:\n try:\n profile = CityCorporationProfileInfo.objects.get(user=request.user)\n if profile.is_cityCor == True:\n return render(request, 'view_profile.html', {'profile': profile, 'user': user})\n except CityCorporationProfileInfo.DoesNotExist:\n return render(request, 'view_profile.html', {'profile': profile, 'user': user})\n\n profile = 0\n user = 0\n return render(request, 'view_profile.html', {'profile': profile, 'user': user})\n\n# def change_password(request):\n# if request.method == 'POST':\n# if request.user.is_authenticated:\n# t = User.objects.get(username=request.user)\n# form = PasswordChangeForm(t, request.POST)\n# if form.is_valid():\n# user = form.save()\n# # update_session_auth_hash(request, user)\n# # messages.success(request, 'Your password was successfully updated!')\n# # return redirect('change_password')\n# else:\n# messages.error(request, 'Please correct the error below.')\n# else:\n# form = PasswordChangeForm(request.user)\n# return render(request, 'user/change_password.html', {\n# 'form': form\n# })\n\n@login_required\ndef change_password(request):\n updated = 'no'\n if request.user.is_authenticated:\n\n form = changePassForm(request.POST or None)\n\n old_password = request.POST.get(\"old_password\")\n new_password = request.POST.get(\"new_password\")\n re_new_password = request.POST.get(\"re_new_password\")\n if request.POST.get(\"old_password\"):\n\n user = User.objects.get(username=request.user.username)\n\n if user.check_password('{}'.format(old_password)) == False:\n form.set_old_password_flag()\n if re_new_password != new_password:\n form.set_re_new_password_flag()\n\n if form.is_valid():\n user.set_password('{}'.format(new_password))\n user.save()\n updated = 'yes'\n update_session_auth_hash(request, user)\n # return redirect('change_password')\n return render(request, 'change_password.html', {\"form\": form, \"updated\": updated})\n else:\n return render(request, 'change_password.html', {\"form\": form, \"updated\": updated})\n else:\n return redirect('user_login')\n\n# @login_required\ndef current_price_update(request):\n updated = False\n\n if request.method == 'POST':\n\n location = request.POST.get('location')\n min_price = request.POST.get('min_price')\n max_price = request.POST.get('max_price')\n t = CurrentPriceUpdate()\n t.location = location\n t.min_price = min_price\n t.max_price = max_price\n t.save()\n\n updated = True\n\n return render(request, 'update_current_price.html',{'updated': updated})\n\n\ndef current_price_view(request):\n view_current_price = \"xyz\"\n if request.method == 'POST':\n location = request.POST.get('location')\n view_current_price = CurrentPriceUpdate.objects.filter(location=location)\n print(view_current_price)\n if not view_current_price:\n view_current_price = \"no_data\"\n return render(request, 'view_current_price.html', {'filter': view_current_price})\n\n return render(request, 'view_current_price.html', {'filter': view_current_price})\n\n\n# @login_required\ndef advertise_post_form(request):\n form_of_post = post_form(request.POST, request.FILES or None)\n posted = 'no'\n if form_of_post.is_valid():\n instance = form_of_post.save(commit=False)\n instance.author = request.user\n instance.save()\n form_of_post = post_form()\n posted = 'yes'\n\n context = {\n 'form_of_post':form_of_post,\n 'posted':posted\n }\n return render(request, 'post_form.html', context)\n\n@login_required\ndef 
update_post_form(request):\n registered = 'no'\n updated = 'no'\n different_advertiser = 'no'\n post_code = 1\n p = 0\n billboard_pic_form = billboardPicForm()\n\n if request.method == 'POST':\n code = request.POST.get('code')\n title = request.POST.get('title')\n location = request.POST.get('location')\n Spec_loc = request.POST.get('Spec_loc')\n width = request.POST.get('width')\n height = request.POST.get('height')\n size = request.POST.get('size')\n price = request.POST.get('price')\n short_desc = request.POST.get('short_desc')\n\n if request.user.is_authenticated:\n try:\n t = PostAdvertiseTable.objects.get(code=code)\n\n if t.author == request.user:\n billboard_pic_form = billboardPicForm(request.POST, request.FILES)\n if billboard_pic_form.is_valid():\n billboard_pic = billboard_pic_form.cleaned_data['posted_billboards_pic']\n else:\n print(billboard_pic_form.errors)\n if title != \"\":\n t.title = title\n if location != \"\":\n t.location = location\n if Spec_loc != \"\":\n t.Spec_loc = Spec_loc\n if width != \"\":\n t.width = width\n if height != \"\":\n t.height = height\n if size != \"\":\n t.size = size\n if price != \"\":\n t.price = price\n if short_desc != \"\":\n t.short_desc = short_desc\n if billboard_pic != \"/posted_billboards_pic/billboards_images/demo_billboard_image.JPG\":\n t.posted_billboards_pic = billboard_pic\n p = 1\n t.save()\n if title == \"\" and location == \"\" and Spec_loc == \"\" and width == \"\" and height == \"\" and size == \"\" and price == \"\" and short_desc == \"\" and p == 0:\n updated = 'all_are_null'\n else:\n updated = 'all_are_not_null'\n else:\n different_advertiser = 'yes'\n except:\n post_code = 0\n else:\n registered = 'not_registered'\n\n # else:\n # billboard_pic_form = billboardPicForm()\n context = {\n 'billboard_pic_form': billboard_pic_form,\n 'registered': registered,\n 'updated': updated,\n 'diff_advertiser': different_advertiser,\n 'post_code': post_code,\n # 'code': code\n }\n return render(request, 'update_post_form.html', context)\n\n\n\n# def post_save(request):\n#\n# if request.method == \"POST\":\n# title = request.POST.get('title')\n# Spec_loc = request.POST.get('location')\n# size = request.POST.get('bill_size')\n# price = request.POST.get('price')\n# short_desc = request.POST.get('desc')\n#\n# mydata = Post_Advertise_table()\n#\n# mydata.title = title\n# mydata.spec_loc = Spec_loc\n# mydata.size = size\n# mydata.price = price\n# mydata.short_desc = short_desc\n#\n# mydata.save()\n# return redirect('advertiserPanel')\n# else:\n#\n# return render(request, 'post_form.html')\n\n\ndef sizeMoneyCalculation(request):\n return render(request, 'sizeMoneyCalculation.html')\n\ndef conv(request):\n num = \"no\"\n try:\n val1 = int(request.GET['num1'])\n val2 = int(request.GET['num2'])\n except:\n return render(request, 'convert.html', {'num': num})\n num = \"yes\"\n res = val1 * val2\n return render(request, 'convert.html', {'result': res, 'size': val2, 'num': num})\n\n\n@login_required\ndef viewPost(request):\n allPosts = PostAdvertiseTable.objects.all().order_by('-post_date')\n allConfirmedposts = confirm_post.objects.all()\n profile = 0\n # print(allPosts)\n # print(allConfirmedposts)\n\n billboard_filter = billboardFilter(request.GET, queryset=allPosts)\n try:\n profile = CustomerProfileInfo.objects.get(user=request.user)\n except CustomerProfileInfo.DoesNotExist:\n try:\n profile = AdvertiserProfileInfo.objects.get(user=request.user)\n except AdvertiserProfileInfo.DoesNotExist:\n try:\n profile = 
CityCorporationProfileInfo.objects.get(user=request.user)\n except CityCorporationProfileInfo.DoesNotExist:\n msg = \"There was an error\"\n print(msg)\n has_filter = any(field in request.GET for field in set(billboard_filter.get_fields()))\n if request.method == 'GET':\n if 'all_post' in request.GET:\n all_Posts = PostAdvertiseTable.objects.all().order_by('-post_date')\n billboard_filter = billboardFilter(request.GET, queryset=all_Posts)\n if 'my_post' in request.GET:\n myPosts = PostAdvertiseTable.objects.filter(author=request.user).order_by('-post_date')\n billboard_filter = billboardFilter(request.GET, queryset=myPosts)\n if 'my_deals' in request.GET:\n profile2 = confirm_post.objects.get(confirmed_by=request.user)\n myDeals = PostAdvertiseTable.objects.filter(code=profile2.adCode).order_by('-confirmed_date')\n billboard_filter = billboardFilter(request.GET, queryset=myDeals)\n context = {'allPosts': allPosts, 'allConfirmedposts': allConfirmedposts, 'user': request.user, 'filter': billboard_filter, 'profile': profile}\n # context1 = {'allConfirmedposts': allConfirmedposts}\n return render(request, 'viewPost.html', context)\n\n@login_required\ndef postDetail(request):\n form_of_post = confirm_post_form(request.POST, request.FILES or None)\n post_code = 1\n posted = 'no'\n msg = 'no'\n profile = 0\n if form_of_post.is_valid():\n try:\n profile = CustomerProfileInfo.objects.get(user=request.user)\n if profile.is_customer == True:\n adCode = form_of_post.cleaned_data['adCode']\n try:\n code = PostAdvertiseTable.objects.get(code=adCode)\n instance = form_of_post.save(commit=False)\n instance.confirmed_by = request.user\n instance.advertiser = code.author\n instance.save()\n form_of_post = confirm_post_form()\n posted = 'yes'\n except:\n post_code = 0\n except CustomerProfileInfo.DoesNotExist:\n try:\n profile = AdvertiserProfileInfo.objects.get(user=request.user)\n if profile.is_advertiser == True:\n msg = \"You are an advertiser!\"\n except AdvertiserProfileInfo.DoesNotExist:\n try:\n profile = CityCorporationProfileInfo.objects.get(user=request.user)\n if profile.is_cityCor == True:\n msg = \"You are city corporation!\"\n except CityCorporationProfileInfo.DoesNotExist:\n msg = \"There was an error\"\n context = {\n 'form_of_post': form_of_post,\n 'posted': posted,\n 'msg': msg,\n 'profile': profile,\n 'post_code': post_code\n }\n return render(request, 'postDetail.html', context)\n\n@login_required\ndef deletePost1(request, c):\n event = PostAdvertiseTable.objects.get(pk=c)\n event.delete()\n # event1=confirm_post.objects.get(adCode=c)\n # event1.delete()\n\n try:\n # obj = A.objects.get(name='John')\n if confirm_post.objects.filter(adCode=c).exists():\n event1 = confirm_post.objects.get(adCode=c)\n event1.delete()\n except:\n pass\n\n # event1 = confirm_post.objects.get(pk=code)\n # event1.delete()\n return redirect('viewPost')\n\n# @login_required\n# def viewAdvertisersRecords(request):\n# allPosts = PostAdvertiseTable.objects.values('author').distinct()\n# # allConfirmedposts = confirm_post.objects.all()\n#\n# return render(request, 'view_advertisers_records.html', {'allPosts': allPosts})\n\ndef myPanel(request):\n profile = 0\n if request.user.is_authenticated:\n try:\n profile = CustomerProfileInfo.objects.get(user=request.user)\n if profile.is_customer == True:\n return HttpResponseRedirect(reverse('customerPanel'))\n except CustomerProfileInfo.DoesNotExist:\n try:\n profile = AdvertiserProfileInfo.objects.get(user=request.user)\n if profile.is_advertiser == True:\n return 
HttpResponseRedirect(reverse('advertiserPanel'))\n except AdvertiserProfileInfo.DoesNotExist:\n try:\n profile = CityCorporationProfileInfo.objects.get(user=request.user)\n if profile.is_cityCor == True:\n return HttpResponseRedirect(reverse('cityCorporationPanel'))\n except CityCorporationProfileInfo.DoesNotExist:\n if request.user.is_staff:\n return HttpResponseRedirect(reverse('staffPanel'))\n else:\n msg = \"User is not logged in\"\n print(msg)\n else:\n return render(request, 'user_login.html', {'profile': profile})\n\n@login_required\ndef viewCurrentDealRecords(request):\n allPosts = confirm_post.objects.all()\n\n return render(request, 'view_current_deal_records.html', {'allPosts': allPosts})\n\n@login_required\ndef viewAdveriserRecords(request):\n allPosts = AdvertiserProfileInfo.objects.all()\n\n return render(request, 'view_advertiser_records.html', {'allPosts': allPosts})\n\n@login_required\ndef viewCustomerRecords(request):\n allPosts = CustomerProfileInfo.objects.all()\n\n return render(request, 'view_customer_records.html', {'allPosts': allPosts})\n\ndef viewRecords(request):\n return render(request, 'view_records.html')\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.7238313555717468, "alphanum_fraction": 0.7269859313964844, "avg_line_length": 56.11475372314453, "blob_id": "e6f8ef58e517ad31d78060f662e398bc643865ec", "content_id": "345e07a97c2228b57d7d78ca178ed36424ef451e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3487, "license_type": "no_license", "max_line_length": 101, "num_lines": 61, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/urls.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "\"\"\"Billboard_Advertisement URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/2.2/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nfrom django.contrib import admin\nfrom django.contrib.staticfiles.urls import staticfiles_urlpatterns\nfrom django.conf.urls.static import static\nfrom django.urls import path\nfrom . 
import views, settings\n\nurlpatterns = [\n path('admin/', admin.site.urls),\n path('',views.home,name='home'),\n path('about/',views.about, name=\"about\"),\n path('aboutUs/',views.aboutUs, name=\"aboutUs\"),\n path('base/',views.base,name='base'),\n path('register_customer/',views.register_customer,name='register_customer'),\n path('register_advertiser/',views.register_advertiser,name='register_advertiser'),\n path('register_cityCorporation/',views.register_cityCorporation,name='register_cityCorporation'),\n path('user_login/',views.user_login,name='user_login'),\n path('logout/', views.user_logout, name='logout'),\n path('staff_login/', views.staff_login, name='staff_login'),\n path('staffPanel/', views.staffPanel, name='staffPanel'),\n path('customerPanel/',views.customerPanel,name='customerPanel'),\n path('advertiserPanel/',views.advertiserPanel,name='advertiserPanel'),\n path('cityCorporationPanel/',views.cityCorporationPanel,name='cityCorporationPanel'),\n path('sign_in_options/',views.sign_in_options, name=\"sign_in_options\"),\n path('update_profile/',views.updateProfile, name='updateProfile'),\n path('view_profile/',views.viewProfile, name='viewProfile'),\n path('password/',views.change_password, name='change_password'),\n path('advertise_post_form/',views.advertise_post_form, name='advertise_post_form'),\n path('update_post_form/',views.update_post_form, name='update_post_form'),\n # path('post_form/post_save',views.post_save, name='post_save'),\n path('sizeMoneyCalculation/', views.sizeMoneyCalculation, name='sizeMoneyCalculation'),\n path('sizeMoneyCalculation/conv/', views.conv, name='conv'),\n path('viewPost/', views.viewPost, name='viewPost'),\n path('postDetail/', views.postDetail, name='postDetail'),\n #path('deletePost/', views.deletePost, name='deletePost'),\n path('deletePost1/<c>', views.deletePost1, name='deletePost1'),\n path('current_price_update/',views.current_price_update, name='current_price_update'),\n path('current_price_view/',views.current_price_view, name='current_price_view'),\n # path('viewAdvertisersRecords/',views.viewAdvertisersRecords, name='viewAdvertisersRecords'),\n path('myPanel/',views.myPanel, name='myPanel'),\n path('viewCurrentDealRecords/',views.viewCurrentDealRecords, name='viewCurrentDealRecords'),\n path('viewAdveriserRecords/',views.viewAdveriserRecords, name='viewAdveriserRecords'),\n path('viewCustomerRecords/',views.viewCustomerRecords, name='viewCustomerRecords'),\n path('viewRecords/',views.viewRecords, name='viewRecords'),\n\n\n]+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n\n\n\n" }, { "alpha_fraction": 0.6817138195037842, "alphanum_fraction": 0.6817138195037842, "avg_line_length": 35.11111068725586, "blob_id": "9134ca95a5d8597a875a50ec30cd9d3c69924e79", "content_id": "9c582b80cf953227fd7be52c906cad4f6855aadd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1307, "license_type": "no_license", "max_line_length": 112, "num_lines": 36, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/tests/test_urls.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "\nfrom django.test import SimpleTestCase\nfrom django.urls import reverse, resolve\nfrom Billboard_Advertisement.views import register_customer, home, sizeMoneyCalculation, viewPost, postDetail, \\\n advertise_post_form\n\n\nclass TestUrls(SimpleTestCase):\n def test_home_url_is_resolved(self):\n url = reverse('home')\n 
print(resolve(url))\n self.assertEquals(resolve(url).func, home)\n\n def test_register_customer_url_is_resolved(self):\n url = reverse('register_customer')\n print(resolve(url))\n self.assertEquals(resolve(url).func, register_customer)\n\n def test_sizeMoneyCalculation_url_is_resolved(self):\n url = reverse('sizeMoneyCalculation')\n print(resolve(url))\n self.assertEquals(resolve(url).func, sizeMoneyCalculation)\n\n def test_viewPost_url_is_resolved(self):\n url = reverse('viewPost')\n print(resolve(url))\n self.assertEquals(resolve(url).func, viewPost)\n\n def test_post_form_is_resolved(self):\n url = reverse('advertise_post_form')\n print(resolve(url))\n self.assertEquals(resolve(url).func, advertise_post_form)\n\n def test_postDetail_is_resolved(self):\n url = reverse('postDetail')\n print(resolve(url))\n self.assertEquals(resolve(url).func, postDetail)\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.6149762272834778, "alphanum_fraction": 0.634566068649292, "avg_line_length": 41.578125, "blob_id": "5d5ec1518394960a2f143347497d89fdd7372f39", "content_id": "c35a75fa38c5e711d55296adb11aef2ac448c8d1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5462, "license_type": "no_license", "max_line_length": 134, "num_lines": 128, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/tests/test_views.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "import datetime\n\nfrom django.contrib.auth import get_user_model\nfrom django.test import TestCase\nfrom django.urls import reverse\nfrom django.utils import timezone\n\nfrom Billboard_Advertisement.models import CurrentPriceUpdate, PostAdvertiseTable, CustomerProfileInfo\n\n\nclass TestViews(TestCase):\n\n # def setUp(self):\n # self.user = get_user_model().objects.create_user(\n # username='testuser', password='secret', first_name='Samir', last_name='Asif', email='[email protected]')\n #\n # self.update = CurrentPriceUpdate.objects.create(location='Khulna', min_price=15.0, max_price=15.0, update_date='2021-08-21')\n\n def test_current_price_update_view_POST(self):\n response = self.client.post(reverse('current_price_update'), data={\n 'location': 'Khulna',\n 'min_price': '12.5',\n 'max_price': '16.5',\n 'update_date': timezone.now,\n })\n post = CurrentPriceUpdate.objects.last()\n self.assertEqual(CurrentPriceUpdate.objects.count(), 1)\n self.assertEquals(response.status_code, 200)\n self.assertEquals(post.location, 'Khulna')\n self.assertEquals(post.min_price, 12.5)\n self.assertEquals(post.max_price, 16.5)\n self.assertEquals(post.update_date, datetime.date.today())\n self.assertTemplateUsed(response, 'update_current_price.html')\n\n def test_current_price_view_view(self):\n response = self.client.post(reverse('current_price_view'))\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, 'view_current_price.html')\n\n def test_advertise_post_form_view(self):\n response = self.client.get(reverse('advertise_post_form'))\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, 'post_form.html')\n\n def test_register_customer_view(self):\n response = self.client.get(reverse('register_customer'))\n self.assertEqual(response.status_code, 200)\n # self.assertContains(response, '')\n self.assertTemplateUsed(response, 'customer_registration.html')\n\n def test_register_advertiser_view(self):\n response = self.client.get(reverse('register_advertiser'))\n 
self.assertEqual(response.status_code, 200)\n # self.assertContains(response, '')\n self.assertTemplateUsed(response, 'advertiser_registration.html')\n\n def test_register_cityCorporation_view(self):\n response = self.client.get(reverse('register_cityCorporation'))\n self.assertEqual(response.status_code, 200)\n # self.assertContains(response, '')\n self.assertTemplateUsed(response, 'govt_registration.html')\n\n\n\n\n\n\n\n\n # def test_advertise_post_form_view_POST(self):\n # response = self.client.post(reverse('advertise_post_form'), data={\n # 'code': \"0013\",\n # 'title': \"test_title\",\n # 'location': \"Dhaka\",\n # 'Spec_loc': \"Badda\",\n # 'width': \"12.0\",\n # 'height': \"8.0\",\n # 'price': \"1000\",\n # 'short_desc': \"This is a billboard\",\n # 'posted_billboards_pic': \"/posted_billboards_pic/billboards_images/demo_billboard_image.JPG\"\n # })\n # post = PostAdvertiseTable.objects.last()\n # self.assertEqual(PostAdvertiseTable.objects.count(), 1)\n # self.assertEquals(response.status_code, 302)\n # self.assertEquals(post.code, '0013')\n # self.assertEquals(post.title, 'test_title')\n # self.assertEquals(post.location, 'Badda')\n # self.assertEquals(post.width, 12.0)\n # self.assertEquals(post.height, 8.0)\n # self.assertEquals(post.price, 1000)\n # self.assertEquals(post.short_desc, 'This is a billboard')\n # self.assertEquals(post.posted_billboards_pic, \"/posted_billboards_pic/billboards_images/demo_billboard_image.JPG\")\n # self.assertTemplateUsed(response, 'post_form.html')\n\n\n # def test_register_customer_view_POST(self):\n # response = self.client.post(reverse('register_customer'), data={\n # 'user': self.user.username,\n # 'location': \"Dhaka\",\n # 'mobileNo': \"+8801845430242\",\n # 'is_customer': True,\n # 'profile_picture': \"/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png\"\n # })\n # post = CustomerProfileInfo.objects.last()\n # self.assertEqual(CustomerProfileInfo.objects.count(), 1)\n # self.assertEquals(response.status_code, 200)\n # self.assertEquals(post.user, self.user.username)\n # self.assertEquals(post.location, 'Dhaka')\n # self.assertEquals(post.mobileNo, '+8801845430242')\n # self.assertEquals(post.is_customer, 'True')\n # self.assertEquals(post.profile_picture, \"/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png\")\n # self.assertTemplateUsed(response, 'customer_registration.html')\n\n\n# class LoginTest(TestCase):\n# def setUp(self):\n# self.credentials = {\n# 'username': 'testuser',\n# 'password': 'secret',\n# 'first_name': 'Samir',\n# 'last_name': 'Asif',\n# 'email': '[email protected]'\n# }\n# User.objects.create_user(**self.credentials)\n# def test_login(self):\n# response = self.client.post('/user_login/', self.credentials, follow=True)\n# print(response.context['user'])\n# self.assertTrue(response.context['user'].is_active)\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5768081545829773, "alphanum_fraction": 0.5965331792831421, "avg_line_length": 34.59574508666992, "blob_id": "44ec7b6a9474ae1b26a01ea1493d52b1d18c0af6", "content_id": "7556471269c2d48e0fc4ebe61dae268272cb6fa3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1673, "license_type": "no_license", "max_line_length": 111, "num_lines": 47, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0004_auto_20210901_2322.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2 on 2021-09-01 
17:22\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0003_auto_20210829_1240'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='postadvertisetable',\n name='id',\n ),\n migrations.AlterField(\n model_name='advertiserprofileinfo',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='citycorporationprofileinfo',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='confirm_post',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='currentpriceupdate',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='customerprofileinfo',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='postadvertisetable',\n name='code',\n field=models.CharField(max_length=100, primary_key=True, serialize=False),\n ),\n ]\n" }, { "alpha_fraction": 0.5278891921043396, "alphanum_fraction": 0.7109004855155945, "avg_line_length": 17.286666870117188, "blob_id": "4990819e326c8a690597a849f3d4575cece67509", "content_id": "cdfa09a94cb0e9cac7407f5ba6131244e232c7c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 2743, "license_type": "no_license", "max_line_length": 42, "num_lines": 150, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/requirements.txt", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": 
"alabaster==0.7.12\narrow==1.1.1\nasgiref==3.4.1\nastroid==2.6.6\natomicwrites==1.4.0\nattrs==21.2.0\nautomium==0.2.6\nautomium-web==0.1.1\nautopep8==1.5.7\nBabel==2.9.1\nbackcall==0.2.0\nbcrypt==3.2.0\nbinaryornot==0.4.4\nblack==21.9b0\nbleach==4.1.0\ncertifi==2021.5.30\ncffi==1.14.6\nchardet==4.0.0\ncharset-normalizer==2.0.6\nclick==8.0.1\ncloudinary==1.26.0\ncloudpickle==2.0.0\ncolorama==0.4.4\ncookiecutter==1.7.3\ncryptography==3.4.8\ndebugpy==1.4.3\ndecorator==5.1.0\ndefusedxml==0.7.1\ndiff-match-patch==20200713\ndj-database-url==0.5.0\nDjango==3.2.5\ndjango-cloudinary-storage==0.3.0\ndjango-crispy-forms==1.12.0\ndjango-filter==2.4.0\ndjango-heroku==0.3.1\ndocutils==0.17.1\nentrypoints==0.3\nflake8==3.9.2\nFlask==2.0.1\ngunicorn==20.1.0\nidna==3.2\nimagesize==1.2.0\nimportlib-metadata==4.8.1\ninflection==0.5.1\nintervaltree==3.1.0\nipykernel==6.4.1\nipython==7.27.0\nipython-genutils==0.2.0\nisort==5.9.3\nitsdangerous==2.0.1\njedi==0.18.0\nJinja2==3.0.1\njinja2-time==0.2.0\njsonschema==3.2.0\njupyter-client==6.1.12\njupyter-core==4.8.1\njupyterlab-pygments==0.1.2\nkeyring==23.2.1\nlazy-object-proxy==1.6.0\nlegacy==0.1.6\nMarkupSafe==2.0.1\nmatplotlib-inline==0.1.3\nmccabe==0.6.1\nmistune==0.8.4\nmypy-extensions==0.4.3\nnbclient==0.5.4\nnbconvert==6.1.0\nnbformat==5.1.3\nnest-asyncio==1.5.1\nnumpydoc==1.1.0\npackaging==21.0\npandocfilters==1.5.0\nparamiko==2.7.2\nparso==0.8.2\npathspec==0.9.0\npexpect==4.8.0\npickleshare==0.7.5\nPillow==8.3.0\nplatformdirs==2.3.0\npluggy==1.0.0\npoyo==0.5.0\nprompt-toolkit==3.0.20\npsutil==5.8.0\npsycopg2==2.9.1\nptyprocess==0.7.0\npycodestyle==2.7.0\npycparser==2.20\npydocstyle==6.1.1\npyflakes==2.3.1\nPygments==2.10.0\npylint==2.9.6\npyls-spyder==0.4.0\nPyNaCl==1.4.0\npyparsing==2.4.7\nPyQt5==5.12.3\nPyQt5-sip==12.9.0\nPyQtWebEngine==5.12.1\npyrsistent==0.18.0\npython-dateutil==2.8.2\npython-decouple==3.4\npython-lsp-black==1.0.0\npython-lsp-jsonrpc==1.0.0\npython-lsp-server==1.2.2\npython-slugify==5.0.2\npytz==2021.1\npywin32==301; platform_system == \"Windows\"\npywin32-ctypes==0.2.0\npyzmq==22.3.0\nQDarkStyle==3.0.2\nqstylizer==0.2.1\nQtAwesome==1.0.3\nqtconsole==5.1.1\nQtPy==1.11.2\nregex==2021.8.28\nrequests==2.26.0\nrope==0.20.1\nRtree==0.9.7\nsix==1.16.0\nsnowballstemmer==2.1.0\nsortedcontainers==2.4.0\nSphinx==4.2.0\nsphinxcontrib-applehelp==1.0.2\nsphinxcontrib-devhelp==1.0.2\nsphinxcontrib-htmlhelp==2.0.0\nsphinxcontrib-jsmath==1.0.1\nsphinxcontrib-qthelp==1.0.3\nsphinxcontrib-serializinghtml==1.1.5\nspyder==5.1.5\nspyder-kernels==2.1.1\nsqlparse==0.4.1\ntestpath==0.5.0\ntext-unidecode==1.3\ntextdistance==4.2.1\nthree-merge==0.1.1\ntinycss2==1.1.0\ntoml==0.10.2\ntomli==1.2.1\ntornado==6.1\ntraitlets==5.1.0\ntyping-extensions==3.10.0.2\nujson==4.2.0\nurllib3==1.26.7\nwatchdog==2.1.5\nwcwidth==0.2.5\nwebencodings==0.5.1\nWerkzeug==2.0.1\nwhitenoise==5.3.0\nwrapt==1.12.1\nyapf==0.31.0\nzipp==3.5.0\n" }, { "alpha_fraction": 0.5810077786445618, "alphanum_fraction": 0.5852712988853455, "avg_line_length": 39.89418029785156, "blob_id": "c9a607aba7c44cfbf3aaac83e69475b4b334bede", "content_id": "0582d53b06ba8f00b34db69b1f8909ed5580398f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7740, "license_type": "no_license", "max_line_length": 156, "num_lines": 189, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/forms.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "from django.utils import timezone\nfrom django 
import forms\nfrom django.contrib.auth.models import User\n\nfrom .models import CustomerProfileInfo, AdvertiserProfileInfo, CityCorporationProfileInfo, confirm_post, PostAdvertiseTable\n\n\nclass UserForm(forms.ModelForm):\n # password = forms.CharField(min_length=4, widget=forms.PasswordInput())\n password = forms.CharField(widget=forms.PasswordInput())\n confirm_password = forms.CharField(widget=forms.PasswordInput())\n class Meta():\n model = User\n fields = ('username', 'password', 'first_name', 'last_name', 'email')\n help_texts = {\n 'username': None,\n }\n\nclass customerProfilePicForm(forms.ModelForm):\n class Meta:\n model = CustomerProfileInfo\n fields = ('profile_picture',)\n labels = {\n \"profile_picture\": \"Profile Picture:\"\n }\n\nclass advertiserProfilePicForm(forms.ModelForm):\n class Meta:\n model = AdvertiserProfileInfo\n fields = ('profile_picture',)\n labels = {\n \"profile_picture\": \"Profile Picture:\"\n }\n\nclass cityCorporationProfilePicForm(forms.ModelForm):\n class Meta:\n model = CityCorporationProfileInfo\n fields = ('profile_picture',)\n labels = {\n \"profile_picture\": \"Profile Picture:\"\n }\n\nclass billboardPicForm(forms.ModelForm):\n class Meta:\n model = PostAdvertiseTable\n fields = ('posted_billboards_pic',)\n labels = {\n \"posted_billboards_pic\": \"Billboard Picture:\"\n }\n\nclass changePassForm(forms.Form):\n old_password_flag = True\n re_new_password_flag = True\n old_password = forms.CharField(label=\"Old Password\", min_length=4, widget=forms.PasswordInput(attrs={'placeholder': ' enter old password'}))\n new_password = forms.CharField(label=\"New Password\", min_length=4, widget=forms.PasswordInput(attrs={'placeholder': ' enter new password'}))\n re_new_password = forms.CharField(label=\"Re-type New Password\", min_length=4,widget=forms.PasswordInput(attrs={'placeholder': ' re-type new password'}))\n\n def set_old_password_flag(self):\n self.old_password_flag = False\n\n return 0\n\n def set_re_new_password_flag(self):\n self.re_new_password_flag = False\n\n return 0\n\n def clean_old_password(self, *args, **kwargs):\n old_password = self.cleaned_data.get('old_password')\n if not old_password:\n raise forms.ValidationError(\"You must enter your old password.\")\n if self.old_password_flag == False:\n raise forms.ValidationError(\"The old password that you have entered is wrong.\")\n if self.re_new_password_flag == False:\n raise forms.ValidationError(\"Re-typed new password did not match with the new password.\")\n\n return old_password\n\nclass post_form(forms.ModelForm):\n class Meta:\n model = PostAdvertiseTable\n fields = ('code', 'title', 'location', 'Spec_loc', 'width', 'height', 'price', 'short_desc', 'posted_billboards_pic')\n labels = {\n \"code\": \"Post Code:\",\n \"title\": \"Title:\",\n \"location\": \"District:\",\n \"Spec_loc\": \"Specific Location:\",\n \"width\": \"Width of Billboard:\",\n \"height\": \"Height of Billboard:\",\n \"price\": \"Rent:\",\n \"short_desc\": \"Short Description:\",\n \"posted_billboards_pic\": \"Billboard Picture:\"\n }\n widgets = {\n 'code': forms.TextInput(attrs={'placeholder': 'enter a code'}),\n 'title': forms.TextInput(attrs={'placeholder': 'enter title'}),\n 'Spec_loc': forms.TextInput(attrs={'placeholder': 'enter specific location'}),\n 'width': forms.TextInput(attrs={'placeholder': 'in sq. feet'}),\n 'height': forms.TextInput(attrs={'placeholder': 'in sq. 
feet'}),\n 'price': forms.TextInput(attrs={'placeholder': 'enter rent'}),\n 'short_desc': forms.Textarea(\n attrs={'rows': 6, 'cols': 50, 'placeholder': 'Write a short description here..'}),\n }\n\nclass confirm_post_form(forms.ModelForm):\n class Meta:\n model = confirm_post\n dealDuration = forms.DateField(initial=timezone.now())\n fields = ('adCode', 'dealDuration')\n labels = {\n # \"year\": \"Year:\",\n # \"month\": \"Month:\",\n # \"day\": \"Day:\",\n \"adCode\": \"Advertisement Code:\",\n \"dealDuration\": \"Deal Duration:\",\n\n }\n widgets = {\n 'adCode': forms.TextInput(attrs={'placeholder': ' enter code'}),\n 'dealDuration': forms.SelectDateWidget()\n }\n\n\n\n\n\n\n# class CustomerProfileInfoForm(forms.ModelForm):\n# class Meta():\n# model = CustomerProfileInfo\n# check = forms.BooleanField(required=True)\n# fields = ('mobileNo', 'location', 'Customer_profile_pic', 'is_customer')\n# labels = {\n# \"mobileNo\": \"Mobile No.:\",\n# \"location\": \"Location:\",\n# # \"dateofbirth\": \"Date of Birth:\",\n# \"Customer_profile_pic\": \"Profile Photo:\",\n# \"check\": \"Confirm\"\n# }\n# help_texts = {\n# 'mobileNo': '<small style=\"color:darkorange\">optional</small>',\n# 'location': '<small style=\"color:darkorange\">optional</small>',\n# # 'dateofbirth': '<small style=\"color:darkorange\">optional</small>',\n# # 'Customer_profile_pic': '<small style=\"color:teal\">optional</small>',\n# 'check': '<small style=\"color:darkorange\">mendatory</small>',\n# }\n# # widgets = {'dateofbirth': forms.SelectDateWidget(years=range(1900, 2021))}\n#\n# class AdvertiserProfileInfoForm(forms.ModelForm):\n# class Meta():\n# model = AdvertiserProfileInfo\n# check = forms.BooleanField(required=True)\n# fields = ('mobileNo', 'location', 'Advertiser_profile_pic', 'is_advertiser')\n# labels = {\n# \"mobileNo\": \"Mobile No.:\",\n# \"location\": \"Location:\",\n# # \"dateofbirth\": \"Date of Birth:\",\n# \"Advertiser_profile_pic\": \"Profile Photo:\",\n# \"check\": \"Confirm\"\n# }\n# help_texts = {\n# 'mobileNo': '<small style=\"color:darkorange\">optional</small>',\n# 'location': '<small style=\"color:darkorange\">optional</small>',\n# # 'dateofbirth': '<small style=\"color:darkorange\">optional</small>',\n# #'Advertiser_profile_pic': '<small style=\"color:teal\">optional</small>',\n# 'check': '<small style=\"color:darkorange\">mendatory</small>',\n# }\n# # widgets = {'dateofbirth': forms.SelectDateWidget(years=range(1900, 2021))}\n#\n# class CityCorporationProfileInfoForm(forms.ModelForm):\n# class Meta():\n# model = CityCorporationProfileInfo\n# check = forms.BooleanField(required=True)\n# fields = ('mobileNo', 'location', 'cityCor_profile_pic', 'is_cityCor')\n# labels = {\n# \"mobileNo\": \"Mobile No.:\",\n# \"location\": \"Location:\",\n# # \"dateofbirth\": \"Date of Birth:\",\n# \"cityCor_profile_pic\": \"Profile Photo:\",\n# \"check\": \"Confirm\"\n# }\n# help_texts = {\n# 'mobileNo': '<small style=\"color:darkorange\">optional</small>',\n# 'location': '<small style=\"color:darkorange\">optional</small>',\n# # 'dateofbirth': '<small style=\"color:darkorange\">optional</small>',\n# #'cityCor_profile_pic': '<small style=\"color:teal\">optional</small>',\n# 'check': '<small style=\"color:darkorange\">mendatory</small>',\n# }\n# # widgets = {'dateofbirth': forms.SelectDateWidget(years=range(1900, 2021))}\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5147058963775635, "alphanum_fraction": 0.6764705777168274, "avg_line_length": 20.33333396911621, "blob_id": 
"aa0c85625a664048142f96213a040734dcf2bf3b", "content_id": "3c7b19755ff85434312e4b858d41aca0064030ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 68, "license_type": "no_license", "max_line_length": 27, "num_lines": 3, "path": "/requirements.txt", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "Django==2.2.17\ndjango_filter==2.4.0\ndjango-crispy-forms==1.12.0 \n\n\n\n" }, { "alpha_fraction": 0.5357142686843872, "alphanum_fraction": 0.6142857074737549, "avg_line_length": 22.33333396911621, "blob_id": "7372c56ba27b884ba81cedf9706dd7056ee29f5b", "content_id": "42846522bd07c34d21c36f78020f94c029b954f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 420, "license_type": "no_license", "max_line_length": 63, "num_lines": 18, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0007_advertiserprofileinfo_num_of_post.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-09-11 07:09\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0006_auto_20210911_1251'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='advertiserprofileinfo',\n name='num_of_post',\n field=models.IntegerField(default=0),\n ),\n ]\n" }, { "alpha_fraction": 0.5327102541923523, "alphanum_fraction": 0.6144859790802002, "avg_line_length": 22.77777862548828, "blob_id": "6bcb0562c5141c7e5c2def9081fb159565034a13", "content_id": "427a519399980886b8c0c2d1d98aee757d39fd1d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 428, "license_type": "no_license", "max_line_length": 65, "num_lines": 18, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0006_auto_20210911_1251.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-09-11 06:51\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0005_auto_20210911_1229'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='confirm_post',\n name='advertiser',\n field=models.CharField(default=None, max_length=100),\n ),\n ]\n" }, { "alpha_fraction": 0.63960200548172, "alphanum_fraction": 0.6626987457275391, "avg_line_length": 49.89140319824219, "blob_id": "a292ef5e955e2daae97c4192ca17dc454ef845be", "content_id": "c41ce8c6a258e9f67d883a66f786b27dc10033b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11257, "license_type": "no_license", "max_line_length": 166, "num_lines": 221, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/tests/test_models.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "from django.contrib.auth.models import User\nfrom django.test import TestCase, Client\n\n# Create your tests here.\nfrom django.urls import reverse\nfrom django.utils.datetime_safe import datetime\n\nfrom django.contrib.auth import get_user_model\n\nfrom Billboard_Advertisement.models import CustomerProfileInfo, AdvertiserProfileInfo, CityCorporationProfileInfo, \\\n CurrentPriceUpdate, PostAdvertiseTable, 
confirm_post\n\n\nclass CustomerProfileTest(TestCase):\n\n def setUp(self):\n self.user = get_user_model().objects.create_user(\n username='testuser', password='secret', first_name='Samir', last_name='Asif', email='[email protected]')\n CustomerProfileInfo.objects.create(user=self.user, currentdate='2021-08-21', location='Dhaka',\n mobileNo='+8801845430242', is_customer=True, profile_picture='/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png')\n\n def test_content(self):\n userInfo = CustomerProfileInfo.objects.get(id=1)\n expected_object_user = f'{userInfo.user}'\n expected_object_currentdate = f'{userInfo.currentdate}'\n expected_object_location = f'{userInfo.location}'\n expected_object_mobileNo = f'{userInfo.mobileNo}'\n expected_object_is_customer = f'{userInfo.is_customer}'\n expected_object_profile_picture = f'{userInfo.profile_picture}'\n self.assertEquals(expected_object_user, self.user.username)\n self.assertEquals(expected_object_currentdate, '2021-08-21')\n self.assertEquals(expected_object_location, 'Dhaka')\n self.assertEquals(expected_object_mobileNo, '+8801845430242')\n self.assertEquals(expected_object_is_customer, 'True')\n self.assertEquals(expected_object_profile_picture, '/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png')\n\n def test_is_customer_label(self):\n user = CustomerProfileInfo.objects.get(id=1)\n field_label = user._meta.get_field('is_customer').verbose_name\n self.assertEqual(field_label, 'is customer')\n\n def test_mobileNo_max_length(self):\n user = CustomerProfileInfo.objects.get(id=1)\n max_length = user._meta.get_field('mobileNo').max_length\n self.assertEqual(max_length, 14)\n\n def test_object_name_is_user(self):\n user = CustomerProfileInfo.objects.get(id=1)\n expected_object_name = f'{user.user}'\n self.assertEqual(str(user), expected_object_name)\n\n\nclass AdvertiserProfileTest(TestCase):\n\n def setUp(self):\n self.user = get_user_model().objects.create_user(\n username='testuser', password='secret', first_name='Samir', last_name='Asif', email='[email protected]')\n AdvertiserProfileInfo.objects.create(user=self.user, currentdate='2021-08-21', location='Dhaka',\n mobileNo='+8801845430242', is_advertiser=True, profile_picture='/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png')\n\n def test_content(self):\n userInfo = AdvertiserProfileInfo.objects.get(id=1)\n expected_object_user = f'{userInfo.user}'\n expected_object_currentdate = f'{userInfo.currentdate}'\n expected_object_location = f'{userInfo.location}'\n expected_object_mobileNo = f'{userInfo.mobileNo}'\n expected_object_is_advertiser = f'{userInfo.is_advertiser}'\n expected_object_profile_picture = f'{userInfo.profile_picture}'\n self.assertEquals(expected_object_user, self.user.username)\n self.assertEquals(expected_object_currentdate, '2021-08-21')\n self.assertEquals(expected_object_location, 'Dhaka')\n self.assertEquals(expected_object_mobileNo, '+8801845430242')\n self.assertEquals(expected_object_is_advertiser, 'True')\n self.assertEquals(expected_object_profile_picture, '/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png')\n\n\nclass GovtProfileTest(TestCase):\n\n def setUp(self):\n self.user = get_user_model().objects.create_user(\n username='testuser', password='secret', first_name='Samir', last_name='Asif', email='[email protected]')\n CityCorporationProfileInfo.objects.create(user=self.user, currentdate='2021-08-21', location='Dhaka',\n mobileNo='+8801845430242', is_cityCor=True,\n 
profile_picture='/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png')\n\n def test_content(self):\n userInfo = CityCorporationProfileInfo.objects.get(id=1)\n expected_object_user = f'{userInfo.user}'\n expected_object_currentdate = f'{userInfo.currentdate}'\n expected_object_location = f'{userInfo.location}'\n expected_object_mobileNo = f'{userInfo.mobileNo}'\n expected_object_is_cityCor = f'{userInfo.is_cityCor}'\n expected_object_profile_picture = f'{userInfo.profile_picture}'\n self.assertEquals(expected_object_user, self.user.username)\n self.assertEquals(expected_object_currentdate, '2021-08-21')\n self.assertEquals(expected_object_location, 'Dhaka')\n self.assertEquals(expected_object_mobileNo, '+8801845430242')\n self.assertEquals(expected_object_is_cityCor, 'True')\n self.assertEquals(expected_object_profile_picture, '/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png')\n\n\nclass UpdatePriceTest(TestCase):\n\n def setUp(self):\n CurrentPriceUpdate.objects.create(location='Dhaka', min_price=15.0, max_price=22.0, update_date='2021-08-21')\n\n def test_content(self):\n priceUpdate = CurrentPriceUpdate.objects.get(id=1)\n expected_object_location = f'{priceUpdate.location}'\n expected_object_min_price = f'{priceUpdate.min_price}'\n expected_object_max_price = f'{priceUpdate.max_price}'\n expected_object_update_date = f'{priceUpdate.update_date}'\n self.assertEquals(expected_object_location, 'Dhaka')\n self.assertEquals(expected_object_min_price, '15.0')\n self.assertEquals(expected_object_max_price, '22.0')\n self.assertEquals(expected_object_update_date, '2021-08-21')\n\n # def test_update_date_label(self):\n # price = CurrentPriceUpdate.objects.get(id=1)\n # field_label = price._meta.get_field('update_date').verbose_name\n # self.assertEqual(field_label, 'update date')\n\n def test_min_price_max_length(self):\n price = CurrentPriceUpdate.objects.get(id=1)\n max_length = price._meta.get_field('min_price').max_length\n self.assertEqual(max_length, 10000)\n\n # def test_object_name_is_update_date(self):\n # price = CurrentPriceUpdate.objects.get(id=1)\n # expected_object_name = f'{price.update_date}'\n # self.assertEqual(str(price), expected_object_name)\n\n\nclass PostAdvertiseTest(TestCase):\n\n def setUp(self):\n self.user = get_user_model().objects.create_user(\n username='testuser', password='secret', first_name='Samir', last_name='Asif', email='[email protected]')\n PostAdvertiseTable.objects.create(author=self.user, code='ab12', title='ad1', location='Dhaka',\n Spec_loc='Badda',\n width=12.0, height=8.0, size=96.0, price=10.0,\n short_desc='abcd', post_date='2021-08-21',\n posted_billboards_pic='/posted_billboards_pic/billboards_images/demo_billboard_image.JPG')\n\n def test_content(self):\n post = PostAdvertiseTable.objects.get(code='ab12')\n expected_object_author = f'{post.author}'\n expected_object_code = f'{post.code}'\n expected_object_title = f'{post.title}'\n expected_object_location = f'{post.location}'\n expected_object_Spec_loc = f'{post.Spec_loc}'\n expected_object_width = f'{post.width}'\n expected_object_height = f'{post.height}'\n expected_object_size = f'{post.size}'\n expected_object_price = f'{post.price}'\n expected_object_short_desc = f'{post.short_desc}'\n expected_object_post_date = f'{post.post_date}'\n expected_object_posted_billboards_pic = f'{post.posted_billboards_pic}'\n self.assertEquals(expected_object_author, self.user.username)\n self.assertEquals(expected_object_code, 'ab12')\n self.assertEquals(expected_object_title, 'ad1')\n 
self.assertEquals(expected_object_location, 'Dhaka')\n self.assertEquals(expected_object_Spec_loc, 'Badda')\n self.assertEquals(expected_object_width, '12.0')\n self.assertEquals(expected_object_height, '8.0')\n self.assertEquals(expected_object_size, '96.0')\n self.assertEquals(expected_object_price, '10.0')\n self.assertEquals(expected_object_short_desc, 'abcd')\n self.assertEquals(expected_object_post_date, '2021-08-21')\n self.assertEquals(expected_object_posted_billboards_pic,\n '/posted_billboards_pic/billboards_images/demo_billboard_image.JPG')\n\n # def test_Spec_loc_label(self):\n # post = PostAdvertiseTable.objects.get(id=1)\n # field_label = post._meta.get_field('Spec_loc').verbose_name\n # self.assertEqual(field_label, 'Spec loc')\n\n def test_short_desc_max_length(self):\n post = PostAdvertiseTable.objects.get(code='ab12')\n max_length = post._meta.get_field('short_desc').max_length\n self.assertEqual(max_length, 500)\n\n # def test_object_name_is_code(self):\n # post = PostAdvertiseTable.objects.get(id=1)\n # expected_object_name = f'{post.code}'\n # self.assertEqual(str(post), expected_object_name)\n\n\nclass confirm_postTest(TestCase):\n\n def setUp(self):\n self.user = get_user_model().objects.create_user(\n username='testuser', password='secret', first_name='Samir', last_name='Asif', email='[email protected]')\n confirm_post.objects.create(confirmed_by=self.user, dealDuration='2021-08-21', adCode='1234',\n advertiser='testadvertiser')\n\n def test_content(self):\n postConfirm = confirm_post.objects.get(id=1)\n expected_object_confirmed_by = f'{postConfirm.confirmed_by}'\n expected_object_dealDuration = f'{postConfirm.dealDuration}'\n expected_object_adCode = f'{postConfirm.adCode}'\n expected_object_advertiser = f'{postConfirm.advertiser}'\n self.assertEquals(expected_object_confirmed_by, self.user.username)\n self.assertEquals(expected_object_dealDuration, '2021-08-21')\n self.assertEquals(expected_object_adCode, '1234')\n self.assertEquals(expected_object_advertiser, 'testadvertiser')\n\n # def test_confirmed_by_label(self):\n # label = confirm_post.objects.get(id=1)\n # field_label = label._meta.get_field('confirmed_by').verbose_name\n # self.assertEqual(field_label, 'confirmed by')\n\n def test_adCode_max_length(self):\n length = confirm_post.objects.get(id=1)\n max_length = length._meta.get_field('adCode').max_length\n self.assertEqual(max_length, 10)\n\n # def test_object_name_is_adCode(self):\n # object_name = confirm_post.objects.get(id=1)\n # expected_object_name = f'{object_name.adCode}'\n # self.assertEqual(str(object_name), expected_object_name)\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5287958383560181, "alphanum_fraction": 0.554973840713501, "avg_line_length": 24.46666717529297, "blob_id": "968c9b60873413763244e3c9eaafdafe6b9f468f", "content_id": "0dd826ce7f74c7135cfbfb818bfb1d18c8c5fb4f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 764, "license_type": "no_license", "max_line_length": 85, "num_lines": 30, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0009_auto_20210913_1403.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-09-13 08:03\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0008_remove_advertiserprofileinfo_num_of_post'),\n ]\n\n operations = [\n 
migrations.RemoveField(\n model_name='confirm_post',\n name='day',\n ),\n migrations.RemoveField(\n model_name='confirm_post',\n name='month',\n ),\n migrations.RemoveField(\n model_name='confirm_post',\n name='year',\n ),\n migrations.AddField(\n model_name='confirm_post',\n name='dealDuration',\n field=models.DateField(blank=True, null=True),\n ),\n ]\n" }, { "alpha_fraction": 0.533816397190094, "alphanum_fraction": 0.6111111044883728, "avg_line_length": 22, "blob_id": "2af5284e3fc9b7b48d76efe013459b70cc5b5bf4", "content_id": "025132445960d9ecd391d0241566d2b21c0b8ae5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 414, "license_type": "no_license", "max_line_length": 63, "num_lines": 18, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0014_auto_20210913_1551.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-09-13 09:51\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0013_auto_20210913_1513'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='confirm_post',\n name='dealDuration',\n field=models.DateField(default=None),\n ),\n ]\n" }, { "alpha_fraction": 0.5388235449790955, "alphanum_fraction": 0.6141176223754883, "avg_line_length": 22.61111068725586, "blob_id": "386d5a856919523d3709a5465873a52ded1b2472", "content_id": "8aac8b0cb7d290a07ab52f0bba0319af57debea8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 425, "license_type": "no_license", "max_line_length": 63, "num_lines": 18, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0012_auto_20210913_1513.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-09-13 09:13\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0011_auto_20210913_1512'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='confirm_post',\n name='dealDuration',\n field=models.DateField(default=None, null=True),\n ),\n ]\n" }, { "alpha_fraction": 0.5940030813217163, "alphanum_fraction": 0.603085994720459, "avg_line_length": 97.25806427001953, "blob_id": "0d2c15e7994fedc781a0c2713c3debd7ae27aee8", "content_id": "a8f52b0db5e2259714b39484f2a048a01e2d75cc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9138, "license_type": "no_license", "max_line_length": 797, "num_lines": 93, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0001_initial.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-08-28 05:26\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ]\n\n operations = [\n migrations.CreateModel(\n name='CurrentPriceUpdate',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('location', 
models.CharField(choices=[('Dhaka', 'Dhaka'), ('Narayanganj', 'Narayanganj'), ('Gazipur', 'Gazipur'), ('Cumilla', 'Cumilla'), ('Chittagong', 'Chittagong'), ('Noakhali', 'Noakhali'), ('Jessore', 'Jessore'), ('Khulna', 'Khulna'), ('Barisal', 'Barisal'), ('Rajshahi', 'Rajshahi'), ('Sylhet', 'Sylhet'), ('Rangpur', 'Rangpur'), ('Feni', 'Feni'), ('Pabna', 'Pabna'), ('Faridpur', 'Faridpur'), ('Dinajpur', 'Dinajpur'), ('Coxs Bazar', 'Coxs Bazar'), ('Bogra', 'Bogra'), ('Tangail', 'Tangail'), ('Patuakhali', 'Patuakhali'), ('Lalmonirhat', 'Lalmonirhat'), ('Madaripur', 'Madaripur'), ('Naogaon', 'Naogaon'), ('Rajbari', 'Rajbari'), ('Narail', 'Narail'), ('Pirojpur', 'Pirojpur'), ('Sherpur', 'Sherpur'), ('Mars', 'Mars')], default='', max_length=30)),\n ('current_price', models.FloatField(default='0', max_length=10000)),\n ('update_date', models.DateField(default=django.utils.timezone.now)),\n ],\n ),\n migrations.CreateModel(\n name='PostAdvertiseTable',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('code', models.CharField(max_length=100, unique=True)),\n ('title', models.CharField(default=None, max_length=100)),\n ('location', models.CharField(choices=[('Dhaka', 'Dhaka'), ('Narayanganj', 'Narayanganj'), ('Gazipur', 'Gazipur'), ('Cumilla', 'Cumilla'), ('Chittagong', 'Chittagong'), ('Noakhali', 'Noakhali'), ('Jessore', 'Jessore'), ('Khulna', 'Khulna'), ('Barisal', 'Barisal'), ('Rajshahi', 'Rajshahi'), ('Sylhet', 'Sylhet'), ('Rangpur', 'Rangpur'), ('Feni', 'Feni'), ('Pabna', 'Pabna'), ('Faridpur', 'Faridpur'), ('Dinajpur', 'Dinajpur'), ('Coxs Bazar', 'Coxs Bazar'), ('Bogra', 'Bogra'), ('Tangail', 'Tangail'), ('Patuakhali', 'Patuakhali'), ('Lalmonirhat', 'Lalmonirhat'), ('Madaripur', 'Madaripur'), ('Naogaon', 'Naogaon'), ('Rajbari', 'Rajbari'), ('Narail', 'Narail'), ('Pirojpur', 'Pirojpur'), ('Sherpur', 'Sherpur'), ('Mars', 'Mars')], default='', max_length=30)),\n ('Spec_loc', models.CharField(default=None, max_length=100)),\n ('width', models.FloatField(default='0', max_length=100)),\n ('height', models.FloatField(default='0', max_length=100)),\n ('size', models.FloatField(default='0', max_length=100)),\n ('price', models.CharField(default=None, max_length=100)),\n ('short_desc', models.TextField(default=None, max_length=500)),\n ('post_date', models.DateField(default=django.utils.timezone.now)),\n ('posted_billboards_pic', models.ImageField(blank=True, default='/posted_billboards_pic/billboards_images/demo_billboard_image.JPG', upload_to='posted_billboards_pic/billboards_images')),\n ('author', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.CreateModel(\n name='CustomerProfileInfo',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('currentdate', models.DateField(default=django.utils.timezone.now)),\n ('location', models.CharField(blank=True, choices=[('Dhaka', 'Dhaka'), ('Narayanganj', 'Narayanganj'), ('Gazipur', 'Gazipur'), ('Cumilla', 'Cumilla'), ('Chittagong', 'Chittagong'), ('Noakhali', 'Noakhali'), ('Jessore', 'Jessore'), ('Khulna', 'Khulna'), ('Barisal', 'Barisal'), ('Rajshahi', 'Rajshahi'), ('Sylhet', 'Sylhet'), ('Rangpur', 'Rangpur'), ('Feni', 'Feni'), ('Pabna', 'Pabna'), ('Faridpur', 'Faridpur'), ('Dinajpur', 'Dinajpur'), ('Coxs Bazar', 'Coxs Bazar'), ('Bogra', 'Bogra'), ('Tangail', 'Tangail'), ('Patuakhali', 'Patuakhali'), ('Lalmonirhat', 'Lalmonirhat'), ('Madaripur', 
'Madaripur'), ('Naogaon', 'Naogaon'), ('Rajbari', 'Rajbari'), ('Narail', 'Narail'), ('Pirojpur', 'Pirojpur'), ('Sherpur', 'Sherpur'), ('Mars', 'Mars')], default='', max_length=30, null=True)),\n ('mobileNo', models.CharField(blank=True, default=None, max_length=14, null=True)),\n ('profile_picture', models.ImageField(blank=True, default='/profiles_pic/Customer_profile_pic/demo_profile_pic2.png', upload_to='profiles_pic/Customer_profile_pic/')),\n ('is_customer', models.BooleanField(default=False)),\n ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.CreateModel(\n name='confirm_post',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('year', models.CharField(max_length=100)),\n ('month', models.CharField(max_length=100)),\n ('day', models.CharField(max_length=100)),\n ('adCode', models.CharField(max_length=100, unique=True)),\n ('advertiser', models.CharField(default=None, max_length=100)),\n ('confirmed_by', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.CreateModel(\n name='CityCorporationProfileInfo',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('currentdate', models.DateField(default=django.utils.timezone.now)),\n ('location', models.CharField(blank=True, choices=[('Dhaka', 'Dhaka'), ('Narayanganj', 'Narayanganj'), ('Gazipur', 'Gazipur'), ('Cumilla', 'Cumilla'), ('Chittagong', 'Chittagong'), ('Noakhali', 'Noakhali'), ('Jessore', 'Jessore'), ('Khulna', 'Khulna'), ('Barisal', 'Barisal'), ('Rajshahi', 'Rajshahi'), ('Sylhet', 'Sylhet'), ('Rangpur', 'Rangpur'), ('Feni', 'Feni'), ('Pabna', 'Pabna'), ('Faridpur', 'Faridpur'), ('Dinajpur', 'Dinajpur'), ('Coxs Bazar', 'Coxs Bazar'), ('Bogra', 'Bogra'), ('Tangail', 'Tangail'), ('Patuakhali', 'Patuakhali'), ('Lalmonirhat', 'Lalmonirhat'), ('Madaripur', 'Madaripur'), ('Naogaon', 'Naogaon'), ('Rajbari', 'Rajbari'), ('Narail', 'Narail'), ('Pirojpur', 'Pirojpur'), ('Sherpur', 'Sherpur'), ('Mars', 'Mars')], default='', max_length=30, null=True)),\n ('mobileNo', models.CharField(blank=True, default=None, max_length=14, null=True)),\n ('profile_picture', models.ImageField(blank=True, default='/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png', upload_to='profiles_pic/cityCor_profile_pic')),\n ('is_cityCor', models.BooleanField(default=False)),\n ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.CreateModel(\n name='AdvertiserProfileInfo',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('currentdate', models.DateField(default=django.utils.timezone.now)),\n ('location', models.CharField(blank=True, choices=[('Dhaka', 'Dhaka'), ('Narayanganj', 'Narayanganj'), ('Gazipur', 'Gazipur'), ('Cumilla', 'Cumilla'), ('Chittagong', 'Chittagong'), ('Noakhali', 'Noakhali'), ('Jessore', 'Jessore'), ('Khulna', 'Khulna'), ('Barisal', 'Barisal'), ('Rajshahi', 'Rajshahi'), ('Sylhet', 'Sylhet'), ('Rangpur', 'Rangpur'), ('Feni', 'Feni'), ('Pabna', 'Pabna'), ('Faridpur', 'Faridpur'), ('Dinajpur', 'Dinajpur'), ('Coxs Bazar', 'Coxs Bazar'), ('Bogra', 'Bogra'), ('Tangail', 'Tangail'), ('Patuakhali', 'Patuakhali'), ('Lalmonirhat', 'Lalmonirhat'), ('Madaripur', 'Madaripur'), ('Naogaon', 
'Naogaon'), ('Rajbari', 'Rajbari'), ('Narail', 'Narail'), ('Pirojpur', 'Pirojpur'), ('Sherpur', 'Sherpur'), ('Mars', 'Mars')], default='', max_length=30, null=True)),\n ('mobileNo', models.CharField(blank=True, default=None, max_length=14, null=True)),\n ('profile_picture', models.ImageField(blank=True, default='/profiles_pic/Advertiser_profile_pic/demo_profile_pic2.png', upload_to='profiles_pic/Advertiser_profile_pic/')),\n ('is_advertiser', models.BooleanField(default=False)),\n ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5242165327072144, "alphanum_fraction": 0.5555555820465088, "avg_line_length": 20.875, "blob_id": "746ca1510f1066365485e967bfc9c3cb75ad2d1b", "content_id": "cf9b31b82c9e03895709ab36d6ccb0dd354847d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 351, "license_type": "no_license", "max_line_length": 41, "num_lines": 16, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/grettings.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "import datetime\n\ndef give_grettings(request):\n currentTime = datetime.datetime.now()\n if 5 <= currentTime.hour < 12:\n time = 'morning'\n elif 12 <= currentTime.hour < 17:\n time = 'afternoon'\n elif 17 <= currentTime.hour < 21:\n time = 'evening'\n else:\n time = 'night'\n\n return {\n 'time': time\n }\n\n" }, { "alpha_fraction": 0.5270196795463562, "alphanum_fraction": 0.5382096171379089, "avg_line_length": 46.584415435791016, "blob_id": "733f1f09a5a325f88ec6c76e5b61942ebef29c34", "content_id": "6607695ef523a62f10ca7a06ab45e9634cb84505", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 3664, "license_type": "no_license", "max_line_length": 123, "num_lines": 77, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/templates/view_current_price.html", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "{% extends 'base.html' %}\n{% block content %}\n <style>\n .content {\n max-width: 900px;\n margin: auto;\n }\n </style>\n<div class = \"content\" style=\"padding: 20px 0px 50px 0px;\">\n <br><br>\n <h2 style=\"color: springgreen\"><b>Select a location</b></h2>\n <form action=\"{% url 'current_price_view' %}\" method=\"post\" enctype=\"multipart/form-data\" class=\"formStyle\">\n {% csrf_token %}\n <br>\n <div>\n <label for=\"location\">Location:</label>\n <select id=\"location\" class=\"form-control\" name=\"location\">\n <option value=\"\">---------</option>\n <option value=\"Dhaka\">Dhaka</option>\n <option value=\"Narayanganj\">Narayanganj</option>\n <option value=\"Gazipur\">Gazipur</option>\n <option value=\"Cumilla\">Cumilla</option>\n <option value=\"Chittagong\">Chittagong</option>\n <option value=\"Noakhali\">Noakhali</option>\n <option value=\"Jessore\">Jessore</option>\n <option value=\"Khulna\">Khulna</option>\n <option value=\"Barisal\">Barisal</option>\n <option value=\"Rajshahi\">Rajshahi</option>\n <option value=\"Sylhet\">Sylhet</option>\n <option value=\"Rangpur\">Rangpur</option>\n <option value=\"Feni\">Feni</option>\n <option value=\"Pabna\">Pabna</option>\n <option value=\"Faridpur\">Faridpur</option>\n <option value=\"Dinajpur\">Dinajpur</option>\n <option value=\"Cox's Bazar\">Cox's Bazar</option>\n <option 
value=\"Bogra\">Bogra</option>\n <option value=\"Tangail\">Tangail</option>\n <option value=\"Patuakhali\">Patuakhali</option>\n <option value=\"Lalmonirhat\">Lalmonirhat</option>\n <option value=\"Madaripur\">Madaripur</option>\n <option value=\"Naogaon\">Naogaon</option>\n <option value=\"Rajbari\">Rajbari</option>\n <option value=\"Narail\">Narail</option>\n <option value=\"Pirojpur\">Pirojpur</option>\n <option value=\"Sherpur\">Sherpur</option>\n <option value=\"Mars\">Mars</option>\n </select>\n </div>\n <br>\n <button type=\"submit\" class=\"btn btn-primary\" style=\"margin: 0px 0px 0px 822px;\"><span>Submit</span></button>\n</form>\n {% if filter == \"no_data\" %}\n <h1 style=\"color: white\">No data found!</h1>\n <h5 style=\"color: white\">Try another location.</h5>\n {% else %}\n\n {% for loc in filter %}\n {% if forloop.last %}\n<!-- <h1 style = \"color:white;\">Estimated current price in {{ loc.location }}</h1>-->\n<!-- <h1 style = \"color:white;\">is {{ loc.current_price }} Taka for per square feet</h1><br>-->\n<!-- <h1 style = \"color:white;\">(updated on {{ loc.update_date }})</h1><br>-->\n<!-- <h1 style = \"color:white;\">Thank you</h1><br>-->\n\n <h1 style = \"color:white;\">Estimated current price,</h1><br>\n <h3 style = \"color:white;\">Location: {{ loc.location }}</h3>\n <h3 style = \"color:white;\">Current price: {{ loc.min_price }} to {{ loc.max_price }} BDT (per square feet)</h3>\n <h3 style = \"color:white;\">Updated on: {{ loc.update_date }}</h3><br>\n <h1 style = \"color:white;\">Thank you</h1><br>\n<!-- <h5 style = \"color:white;\">This feature is not completed yet.</h5>-->\n {% endif %}\n {% endfor %}\n\n {% endif %}\n <br><br>\n <a href=\"{{ request.META.HTTP_REFERER }}\"><button class=\"button\"><span>Back</span></button></a>\n</div>\n{% endblock %}\n" }, { "alpha_fraction": 0.5714285969734192, "alphanum_fraction": 0.5794844031333923, "avg_line_length": 39.9555549621582, "blob_id": "5620db7d0444b4ed622d88404015c2af17b0d047", "content_id": "ff9ee2e3689426021197795a20579ef20a27b897", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1870, "license_type": "no_license", "max_line_length": 128, "num_lines": 45, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/filter.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "from urllib import request\n\nimport django_filters\n\nfrom django import forms\nfrom spyder.config import user\n\nfrom .models import PostAdvertiseTable, CurrentPriceUpdate\n\n\nclass billboardFilter(django_filters.FilterSet):\n location = django_filters.AllValuesFilter(label='• Billboard Location',\n widget=forms.Select(\n attrs={'style': 'width:210px'}))\n size = django_filters.RangeFilter(label='• Billboard Size (in square feet)',\n widget=django_filters.widgets.RangeWidget(\n attrs={'placeholder': ' 0', 'style': 'width:250px'}))\n price = django_filters.RangeFilter(label='• Billboard Price Range',\n widget=django_filters.widgets.RangeWidget(\n attrs={'placeholder': ' 0', 'style': 'width:250px'}))\n # author = django_filters.AllValuesFilter(field_name='author__username', lookup_expr='iexact')\n\n author = django_filters.CharFilter(field_name='author__username', lookup_expr='iexact',\n label='• Advertiser Username',\n widget=forms.TextInput(attrs={'placeholder': ' enter username', 'style': 'width:250px'}))\n\n class Meta:\n model = PostAdvertiseTable\n fields = ['size', 'price', 'location']\n\n\nclass 
billboardFilter2(django_filters.FilterSet):\n location = django_filters.AllValuesFilter(label=\"\")\n\n class Meta:\n model = PostAdvertiseTable\n fields = ['location']\n\n\n# class viewCurPriceByLoc(django_filters.FilterSet):\n# location = django_filters.AllValuesFilter(label='Location')\n#\n# class Meta:\n# model = CurrentPriceUpdate\n# fields = ['location']\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5570651888847351, "alphanum_fraction": 0.58423912525177, "avg_line_length": 51.57143020629883, "blob_id": "4f4d19b9dc14869fd72cda5dae8d0f98feaf6a92", "content_id": "67a115ee0c4b9b4c89e7b1971c95df408f93ea4b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1472, "license_type": "no_license", "max_line_length": 762, "num_lines": 28, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0003_auto_20210829_1240.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-08-29 06:40\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0002_auto_20210828_2254'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='currentpriceupdate',\n old_name='current_price',\n new_name='max_price',\n ),\n migrations.AddField(\n model_name='currentpriceupdate',\n name='min_price',\n field=models.FloatField(default='0', max_length=10000),\n ),\n migrations.AlterField(\n model_name='postadvertisetable',\n name='location',\n field=models.CharField(choices=[('Dhaka', 'Dhaka'), ('Narayanganj', 'Narayanganj'), ('Gazipur', 'Gazipur'), ('Cumilla', 'Cumilla'), ('Chittagong', 'Chittagong'), ('Noakhali', 'Noakhali'), ('Jessore', 'Jessore'), ('Khulna', 'Khulna'), ('Barisal', 'Barisal'), ('Rajshahi', 'Rajshahi'), ('Sylhet', 'Sylhet'), ('Rangpur', 'Rangpur'), ('Feni', 'Feni'), ('Pabna', 'Pabna'), ('Faridpur', 'Faridpur'), ('Dinajpur', 'Dinajpur'), ('Coxs Bazar', 'Coxs Bazar'), ('Bogra', 'Bogra'), ('Tangail', 'Tangail'), ('Patuakhali', 'Patuakhali'), ('Lalmonirhat', 'Lalmonirhat'), ('Madaripur', 'Madaripur'), ('Naogaon', 'Naogaon'), ('Rajbari', 'Rajbari'), ('Narail', 'Narail'), ('Pirojpur', 'Pirojpur'), ('Sherpur', 'Sherpur'), ('Mars', 'Mars')], default='', max_length=30),\n ),\n ]\n" }, { "alpha_fraction": 0.5431235432624817, "alphanum_fraction": 0.61771559715271, "avg_line_length": 22.83333396911621, "blob_id": "aa386f7fa0bb2d7a1e566eb579edb32324e94319", "content_id": "da5f1fe63e589177748c169b0b90bddfa07b27a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 429, "license_type": "no_license", "max_line_length": 64, "num_lines": 18, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0010_auto_20210913_1458.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-09-13 08:58\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0009_auto_20210913_1403'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='confirm_post',\n name='dealDuration',\n field=models.DateTimeField(default=None, null=True),\n ),\n ]\n" }, { "alpha_fraction": 0.5446428656578064, "alphanum_fraction": 0.5558035969734192, "avg_line_length": 28.711111068725586, "blob_id": 
"cc0e083900498cc7061a03a200c1fd9640ae9f89", "content_id": "4b3103dcba83c5ce26dbf93812ce2e4ea649bab8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2688, "license_type": "no_license", "max_line_length": 120, "num_lines": 90, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/tests/test_forms.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "from django.contrib.auth import get_user_model\nfrom django.test import TestCase\n\nfrom Billboard_Advertisement.forms import confirm_post_form, post_form, changePassForm, customerProfilePicForm, UserForm\n\n\nclass TestForms(TestCase):\n #databases = '__all__'\n def test_confirm_post_form_valid_data(self):\n form = confirm_post_form(data={\n 'dealDuration': '2021-11-21',\n 'adCode': '0013',\n\n })\n # self.assertTrue(form.is_valid())\n\n def test_confirm_post_form_no_data(self):\n form = confirm_post_form(data={})\n\n self.assertFalse(form.is_valid())\n self.assertEqual(len(form.errors),2)\n\n\n\n def test_post_form_valid_data(self):\n form = post_form(data = {\n 'code': \"0013\",\n 'title': \"test_title\",\n 'location': \"Dhaka\",\n 'Spec_loc': \"Jatrabari\",\n 'width': \"12.0\",\n 'height': \"8.0\",\n 'price': \"1000\",\n 'short_desc': \"This is billboard\",\n 'posted_billboards_pic': \"/posted_billboards_pic/billboards_images/demo_billboard_image.JPG\"\n })\n self.assertTrue(form.is_valid())\n\n def test_post_from_form_no_data(self):\n form = post_form(data={})\n\n self.assertFalse(form.is_valid())\n self.assertEqual(len(form.errors),8)\n\n\n\n def test_changePassword_from_valid_data(self):\n form = changePassForm(data = {\n 'old_password': \"1234\",\n 'new_password': \"abcd\",\n 're_new_password': \"abcd\",\n })\n self.assertTrue(form.is_valid())\n\n def test_changePassword_form_no_data(self):\n form = changePassForm(data={})\n\n self.assertFalse(form.is_valid())\n self.assertEqual(len(form.errors),3)\n\n\n\n\n # def test_user_from_valid_data(self):\n # form = UserForm(data = {\n # 'username': \"testuser\",\n # 'password': \"abcd1234\",\n # 'first_name': \"Samir\",\n # 'last_name': \"Asif\",\n # 'email': \"[email protected]\",\n # })\n # self.assertTrue(form.is_valid())\n #\n # def test_user_form_no_data(self):\n # form = UserForm(data={})\n #\n # self.assertFalse(form.is_valid())\n # self.assertEqual(len(form.errors),2)\n\n # def test_profilePic_from_valid_data(self):\n # form = customerProfilePicForm(data = {\n # 'profile_picture': \"Profile Picture\",\n # })\n # self.assertTrue(form.is_valid())\n #\n # def test_profilePic_form_no_data(self):\n # form = customerProfilePicForm(data={})\n #\n # self.assertFalse(form.is_valid())\n # self.assertEqual(len(form.errors),1)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.6702046990394592, "alphanum_fraction": 0.6821455359458923, "avg_line_length": 41.41935348510742, "blob_id": "210b80dc697a52a4470f71bbf9da43b066da47b9", "content_id": "aebe694b4d068b6189f1c1a4dfd88b3e99e1c27d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5276, "license_type": "no_license", "max_line_length": 171, "num_lines": 124, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/models.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "import datetime\nfrom datetime import date\nfrom django.db import models\nfrom django.contrib.auth.models import 
User, AbstractUser\nfrom django.utils import timezone\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom django.conf import settings\n\nlocations = [\n ('Dhaka', 'Dhaka'),\n ('Narayanganj', 'Narayanganj'),\n ('Gazipur', 'Gazipur'),\n ('Cumilla', 'Cumilla'),\n ('Chittagong', 'Chittagong'),\n ('Noakhali', 'Noakhali'),\n ('Jessore', 'Jessore'),\n ('Khulna', 'Khulna'),\n ('Barisal', 'Barisal'),\n ('Rajshahi', 'Rajshahi'),\n ('Sylhet', 'Sylhet'),\n ('Rangpur', 'Rangpur'),\n ('Feni', 'Feni'),\n ('Pabna', 'Pabna'),\n ('Faridpur', 'Faridpur'),\n ('Dinajpur', 'Dinajpur'),\n ('Coxs Bazar', 'Coxs Bazar'),\n ('Bogra', 'Bogra'),\n ('Tangail', 'Tangail'),\n ('Patuakhali', 'Patuakhali'),\n ('Lalmonirhat', 'Lalmonirhat'),\n ('Madaripur', 'Madaripur'),\n ('Naogaon', 'Naogaon'),\n ('Rajbari', 'Rajbari'),\n ('Narail', 'Narail'),\n ('Pirojpur', 'Pirojpur'),\n ('Sherpur', 'Sherpur'),\n ('Mars', 'Mars'),\n]\n\nclass CustomerProfileInfo(models.Model):\n user = models.OneToOneField(User, on_delete=models.CASCADE, blank=True, null=True)\n currentdate = models.DateField(default=timezone.now)\n location = models.CharField(max_length=30, default='', blank=True, null=True, choices=locations)\n mobileNo = models.CharField(max_length=14, default=None, blank=True, null=True)\n profile_picture = models.ImageField(upload_to='profiles_pic/Customer_profile_pic/', default='/profiles_pic/Customer_profile_pic/demo_profile_pic2.png', blank=True)\n is_customer = models.BooleanField(default=False)\n objects = models.Manager()\n\n def __str__(self):\n return str(self.user)\n\nclass AdvertiserProfileInfo(models.Model):\n user = models.OneToOneField(User, on_delete=models.CASCADE, blank=True, null=True)\n currentdate = models.DateField(default=timezone.now)\n location = models.CharField(max_length=30, default='', blank=True, null=True, choices=locations)\n mobileNo = models.CharField(max_length=14, default=None, blank=True, null=True)\n profile_picture = models.ImageField(upload_to='profiles_pic/Advertiser_profile_pic/', default='/profiles_pic/Advertiser_profile_pic/demo_profile_pic2.png', blank=True)\n is_advertiser = models.BooleanField(default=False)\n objects = models.Manager()\n\n def __str__(self):\n return str(self.user)\n\nclass CityCorporationProfileInfo(models.Model):\n user = models.OneToOneField(User, on_delete=models.CASCADE, blank=True, null=True)\n currentdate = models.DateField(default=timezone.now)\n location = models.CharField(max_length=30, default='', blank=True, null=True, choices=locations)\n mobileNo = models.CharField(max_length=14, default=None, blank=True, null=True)\n profile_picture = models.ImageField(upload_to='profiles_pic/cityCor_profile_pic', default='/profiles_pic/cityCor_profile_pic/demo_profile_pic2.png', blank=True)\n is_cityCor = models.BooleanField(default=False)\n objects = models.Manager()\n\n def __str__(self):\n return str(self.user)\n\n\n\nclass PostAdvertiseTable(models.Model):\n author = models.ForeignKey(User, on_delete=models.CASCADE, default=None, related_name=\"adPoster\")\n code = models.CharField(max_length=100, primary_key=True)\n title = models.CharField(max_length = 100, default=None)\n location = models.CharField(max_length=30, default='', blank=False, choices=locations)\n Spec_loc = models.CharField(max_length=100, default=None)\n width = models.FloatField(max_length=100, default='0')\n height = models.FloatField(max_length=100, default='0')\n size = models.FloatField(max_length=100, default='0')\n price = 
models.CharField(max_length=100, default=None)\n short_desc = models.TextField(max_length=500, default=None)\n post_date = models.DateField(default=timezone.now)\n posted_billboards_pic = models.ImageField(upload_to='posted_billboards_pic/billboards_images',\n default='/posted_billboards_pic/billboards_images/demo_billboard_image.JPG',\n blank=True)\n objects = models.Manager()\n\n def save(self, *args, **kwargs):\n self.size = float(self.width) * float(self.height)\n super(PostAdvertiseTable, self).save(*args, **kwargs)\n\n def __str__(self):\n return self.code\n\n\nclass confirm_post(models.Model):\n confirmed_by = models.ForeignKey(User, on_delete=models.CASCADE, default=None)\n confirmed_date = models.DateField(default=timezone.now)\n dealDuration = models.DateField(default=None)\n adCode = models.CharField(max_length=10, unique=True)\n advertiser = models.CharField(max_length=100, default=None)\n objects = models.Manager()\n\n def __str__(self):\n return self.adCode\n\n\nclass CurrentPriceUpdate(models.Model):\n location = models.CharField(max_length=30, default='', choices=locations)\n min_price = models.FloatField(max_length=10000, default='0')\n max_price = models.FloatField(max_length=10000, default='0')\n update_date = models.DateField(default=timezone.now)\n objects = models.Manager()\n\n def __str__(self):\n return str(self.update_date)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.6187845468521118, "alphanum_fraction": 0.6235727667808533, "avg_line_length": 40.4461555480957, "blob_id": "211f3ce10ce31d0ce2c5080db1bdc348dedb837b", "content_id": "41c3f4b957501261e6f463998e6387b65b30c0ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2715, "license_type": "no_license", "max_line_length": 112, "num_lines": 65, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/tests/extra_tests.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "#\n#\n# class UserTest(TestCase):\n#\n# def setUp(self):\n# self.user = get_user_model().objects.create_user(\n# username='testuser', password='secret', first_name='Samir', last_name='Asif', email='[email protected]')\n#\n# def test_content(self):\n# user = User.objects.get(id=1)\n# expected_object_username = f'{user.username}'\n# # expected_object_password = f'{user.password}'\n# expected_object_first_name = f'{user.first_name}'\n# expected_object_last_name = f'{user.last_name}'\n# expected_object_email = f'{user.email}'\n# self.assertEquals(expected_object_username, 'testuser')\n# # self.assertEquals(expected_object_password, 'secret')\n# self.assertEquals(self.user.check_password('secret'), True)\n# self.assertEquals(expected_object_first_name, 'Samir')\n# self.assertEquals(expected_object_last_name, 'Asif')\n# self.assertEquals(expected_object_email, '[email protected]')\n#\n# def test_user_label(self):\n# user = User.objects.get(id=1)\n# field_label = user._meta.get_field('username').verbose_name\n# self.assertEqual(field_label, 'username')\n#\n# def test_object_name_is_username(self):\n# user = User.objects.get(id=1)\n# expected_object_name = f'{user.username}'\n# self.assertEqual(str(user), expected_object_name)\n#\n#\n#\n# def test_is_advertiser_label(self):\n# user = AdvertiserProfileInfo.objects.get(id=1)\n# field_label = user._meta.get_field('is_advertiser').verbose_name\n# self.assertEqual(field_label, 'is advertiser')\n#\n# def test_mobileNo_max_length(self):\n# user = 
AdvertiserProfileInfo.objects.get(id=1)\n# max_length = user._meta.get_field('mobileNo').max_length\n# self.assertEqual(max_length, 14)\n#\n# def test_object_name_is_user(self):\n# user = AdvertiserProfileInfo.objects.get(id=1)\n# expected_object_name = f'{user.user}'\n# self.assertEqual(str(user), expected_object_name)\n#\n#\n#\n# def test_is_cityCor_label(self):\n# user = CityCorporationProfileInfo.objects.get(id=1)\n# field_label = user._meta.get_field('is_cityCor').verbose_name\n# self.assertEqual(field_label, 'is cityCor')\n#\n# def test_mobileNo_max_length(self):\n# user = CityCorporationProfileInfo.objects.get(id=1)\n# max_length = user._meta.get_field('mobileNo').max_length\n# self.assertEqual(max_length, 14)\n#\n# def test_object_name_is_user(self):\n# user = CityCorporationProfileInfo.objects.get(id=1)\n# expected_object_name = f'{user.user}'\n# self.assertEqual(str(user), expected_object_name)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.78899085521698, "alphanum_fraction": 0.78899085521698, "avg_line_length": 20.799999237060547, "blob_id": "8e4cfe47adf84c716ceed226b878b956e7442dee", "content_id": "8fa92417b6ef2e5c9aea440d938ebe31f8e7f557", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 109, "license_type": "no_license", "max_line_length": 36, "num_lines": 5, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/apps.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "from django.apps import AppConfig\n\n\nclass GoldbankappConfig(AppConfig):\n name = 'Billboard_Advertisement'\n" }, { "alpha_fraction": 0.5736842155456543, "alphanum_fraction": 0.6263157725334167, "avg_line_length": 21.352941513061523, "blob_id": "b8c01e27b277a60f4217ff8aa36432fcb64e69db", "content_id": "c7f59b257dd21fb769397a8c6417d42d0770be13", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 380, "license_type": "no_license", "max_line_length": 78, "num_lines": 17, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0008_remove_advertiserprofileinfo_num_of_post.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-09-11 07:18\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0007_advertiserprofileinfo_num_of_post'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='advertiserprofileinfo',\n name='num_of_post',\n ),\n ]\n" }, { "alpha_fraction": 0.5853480100631714, "alphanum_fraction": 0.6080586314201355, "avg_line_length": 34.921051025390625, "blob_id": "dfb3c5f5dd37a540c8d2f9112777877e2ec3a596", "content_id": "1fd6f57b79e6dbb659fc91f71a4cee261f0cd7fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1365, "license_type": "no_license", "max_line_length": 111, "num_lines": 38, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0017_auto_20210917_1419.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.5 on 2021-09-17 08:19\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0016_auto_20210917_1350'),\n ]\n\n operations = 
[\n migrations.AlterField(\n model_name='advertiserprofileinfo',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='citycorporationprofileinfo',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='confirm_post',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='currentpriceupdate',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='customerprofileinfo',\n name='id',\n field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n ]\n" }, { "alpha_fraction": 0.875, "alphanum_fraction": 0.875, "avg_line_length": 32, "blob_id": "43a83b91033503b0f1ea098d12553c2c23a5f30f", "content_id": "90fcb6ad1279c68e31b2bd57d892de9953d03904", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 32, "license_type": "no_license", "max_line_length": 32, "num_lines": 1, "path": "/README.md", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Billboard-Advertisement-System" }, { "alpha_fraction": 0.6033950448036194, "alphanum_fraction": 0.6203703880310059, "avg_line_length": 37.880001068115234, "blob_id": "638b4547ac6c03244fd461851dca52bc95537287", "content_id": "e8b35749d072f2bfcf5aa662438787e4f1c2c2d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1944, "license_type": "no_license", "max_line_length": 150, "num_lines": 50, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0005_auto_20210911_1229.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-09-11 06:29\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0004_auto_20210901_2322'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='advertiserprofileinfo',\n name='id',\n field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='citycorporationprofileinfo',\n name='id',\n field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='confirm_post',\n name='advertiser',\n field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='adPoster2', to=settings.AUTH_USER_MODEL),\n ),\n migrations.AlterField(\n model_name='confirm_post',\n name='id',\n field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='currentpriceupdate',\n name='id',\n field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='customerprofileinfo',\n name='id',\n field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),\n ),\n migrations.AlterField(\n model_name='postadvertisetable',\n 
name='author',\n field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='adPoster', to=settings.AUTH_USER_MODEL),\n ),\n ]\n" }, { "alpha_fraction": 0.842900276184082, "alphanum_fraction": 0.842900276184082, "avg_line_length": 38, "blob_id": "b10a6b241f4fe0cf37d80e164b3ccc85b537184f", "content_id": "9eb04beee0b86ca20ed3e43d30cb277a496d6626", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 662, "license_type": "no_license", "max_line_length": 107, "num_lines": 17, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/admin.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "from django.contrib import admin\n\n# Register your models here.\nfrom .models import CustomerProfileInfo, AdvertiserProfileInfo, CityCorporationProfileInfo, confirm_post, \\\n PostAdvertiseTable, CurrentPriceUpdate\n\nadmin.site.site_header = 'Billboard Advertisement System admin'\nadmin.site.site_title = 'Billboard Advertisement System admin'\nadmin.site.index_title = 'Billboard Advertisement System administration'\n\n\nadmin.site.register(CustomerProfileInfo)\nadmin.site.register(AdvertiserProfileInfo)\nadmin.site.register(CityCorporationProfileInfo)\nadmin.site.register(PostAdvertiseTable)\nadmin.site.register(confirm_post)\nadmin.site.register(CurrentPriceUpdate)" }, { "alpha_fraction": 0.7134909629821777, "alphanum_fraction": 0.7273991703987122, "avg_line_length": 36.842105865478516, "blob_id": "ee41662588935000307899d801664b34fe646508", "content_id": "e71d469fab2b5725c0945446e3340af6ab17c5f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 1440, "license_type": "no_license", "max_line_length": 167, "num_lines": 38, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/templates/about.html", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "<!DOCTYPE html>\n{% extends 'newBase2.html' %}\n{% block content %}\n{% load static %}\n<html>\n<head>\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n<style>\nh4 {text-align: center;}\np {text-align: center;}\nbody {\n font-family: Arial, Helvetica, sans-serif;\n margin: 0;\n}\n.about-section {\n padding: 50px;\n text-align: center;\n background-color: #000000;\n color: white;\n}\n</style>\n</head>\n<body>\n\n<div class=\"about-section\">\n <h1>About The Project</h1><br>\n <p>This is a billboard advertisement system of Bangladesh.</p>\n <p>Our mission is to provide an efficient system where dealers and customers can make deals very easily.</p><br><br>\n <img src=\"/static/assets/image/about.gif\" alt=\"Image\" width=\"500\" height=\"300\"><br><br><br><br>\n <p>The billboard advertisement system of our country has been done manually. Sometimes it is difficult for advertisers to find a proper customer for their</p>\n <p>advertisements. Customers also find it difficult to find billboards for their advertisements. By using this application, they can find all the services in one</p>\n <p>place. This application can turn the whole country’s billboard advertisement system into a well-planned system. City corporations can monitor all the</p>\n <p>activities. 
Everyone can view which billboards are currently empty and which billboards are sold to a customer and the deal duration also.</p>\n</div>\n\n</body>\n</html>\n{% endblock %}\n" }, { "alpha_fraction": 0.5721966028213501, "alphanum_fraction": 0.5906298160552979, "avg_line_length": 55.60869598388672, "blob_id": "8c4e293444cae9c98d6cdcd95ecc4a29ab170639", "content_id": "0507c9765d44c0f70fda53dcd05b076a6d460d14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1302, "license_type": "no_license", "max_line_length": 774, "num_lines": 23, "path": "/Billboard-Advertisement-System/Billboard_Advertisement/Billboard_Advertisement/migrations/0002_auto_20210828_2254.py", "repo_name": "Samir529/Billboard-Advertisement-System", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.17 on 2021-08-28 16:54\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Billboard_Advertisement', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='confirm_post',\n name='adCode',\n field=models.CharField(max_length=10, unique=True),\n ),\n migrations.AlterField(\n model_name='postadvertisetable',\n name='location',\n field=models.CharField(blank=True, choices=[('Dhaka', 'Dhaka'), ('Narayanganj', 'Narayanganj'), ('Gazipur', 'Gazipur'), ('Cumilla', 'Cumilla'), ('Chittagong', 'Chittagong'), ('Noakhali', 'Noakhali'), ('Jessore', 'Jessore'), ('Khulna', 'Khulna'), ('Barisal', 'Barisal'), ('Rajshahi', 'Rajshahi'), ('Sylhet', 'Sylhet'), ('Rangpur', 'Rangpur'), ('Feni', 'Feni'), ('Pabna', 'Pabna'), ('Faridpur', 'Faridpur'), ('Dinajpur', 'Dinajpur'), ('Coxs Bazar', 'Coxs Bazar'), ('Bogra', 'Bogra'), ('Tangail', 'Tangail'), ('Patuakhali', 'Patuakhali'), ('Lalmonirhat', 'Lalmonirhat'), ('Madaripur', 'Madaripur'), ('Naogaon', 'Naogaon'), ('Rajbari', 'Rajbari'), ('Narail', 'Narail'), ('Pirojpur', 'Pirojpur'), ('Sherpur', 'Sherpur'), ('Mars', 'Mars')], default='', max_length=30),\n ),\n ]\n" } ]
31
eustinova/ma_test
https://github.com/eustinova/ma_test
308a79a991470ff3f0aee2498b1535affe10f859
60b34bc9729ce660d35a487ac0c426d5ff2f3e42
c5c867a81cdae579e03282f58acac4d815903c79
refs/heads/main
2023-02-16T00:28:40.986783
2021-01-15T13:42:57
2021-01-15T13:42:57
329,922,330
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7022900581359863, "alphanum_fraction": 0.707379162311554, "avg_line_length": 64.5, "blob_id": "1e75d0146015046a094cd37ca8ee34f04fce034f", "content_id": "937fec092ea4362d81016018ce24ceb924b5e4d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 481, "license_type": "no_license", "max_line_length": 111, "num_lines": 6, "path": "/MA-test.py", "repo_name": "eustinova/ma_test", "src_encoding": "UTF-8", "text": "import pandas as pd\nma_df = pd.read_csv('test_pandas.csv') #загружаю данные в dataframe\nma_df['sum'] = ma_df['price_per_job'] * ma_df['jobs_made'] # высчитываю поле sum\nved_df = ma_df.groupby(['uid'])['sum'].sum().reset_index() #формирую группировку по uid и скалдываю по полю sum\n#print(ved_df)\nved_df.to_csv(r'df-test1.csv', index=False, header=True) #сохраняю результат в файл df-test1.csv\n" }, { "alpha_fraction": 0.6946264505386353, "alphanum_fraction": 0.7129750847816467, "avg_line_length": 83.77777862548828, "blob_id": "9d868a27a70d5b434e77b5197486d3af18970b49", "content_id": "5af330715b5316c2f985932a896b973a2097df27", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 963, "license_type": "no_license", "max_line_length": 141, "num_lines": 9, "path": "/2.MA-test.py", "repo_name": "eustinova/ma_test", "src_encoding": "UTF-8", "text": "import pandas as pd\ndata = pd.read_csv(\"test_pandas.csv\") #читаю данные в dataframe\ndata['sum'] = data['price_per_job'] * data['jobs_made'] #добавляю поле sum, в котором получаю сумму выполненных работ\nved_df = data.groupby(['uid'])['sum'].sum().reset_index() #групперую по uid и суммирую по полю sun.\nsort_df = ved_df.sort_values(by=['sum'], ascending=False) #сортирую по убыванию\nsort_df['cumsum'] = sort_df['sum'].cumsum() #добавляю поле cumsum и высчитываю кумулятивное значение\nsum60perc = sort_df['sum'].sum() * 0.6 #нахожу значение 60% от всех сумм\n#print(\"uids 60%:\", sort_df.loc[sort_df['cumsum'] <= sum60perc])\nsort_df.loc[sort_df['cumsum'] <= sum60perc].to_csv(r'df-test2.csv', index=False, header=True) #сохраняю результат выборки в файл df-test2.csv\n" } ]
2
hkiepe/fastAPI-rentalguard
https://github.com/hkiepe/fastAPI-rentalguard
e1b08b0782887732526b660ed527c82fa3ad4818
15d44d7ae03112b2fdb5e004016debc36b93fa49
876fc365674d3c0f2bcc3819802d22b053beff81
refs/heads/master
2023-06-20T14:18:52.705037
2021-07-25T16:36:00
2021-07-25T16:36:00
373,106,583
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7953668236732483, "alphanum_fraction": 0.7953668236732483, "avg_line_length": 36, "blob_id": "f130172f5641ad6fbc04967c47cafd968b2fe0c5", "content_id": "b0198d199a349302aa18ce86f8caf9802825b44b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 259, "license_type": "no_license", "max_line_length": 60, "num_lines": 7, "path": "/README.md", "repo_name": "hkiepe/fastAPI-rentalguard", "src_encoding": "UTF-8", "text": "# API for the rental client application\n* Based on FastAPI framework\n* Returns Auth bearer token for authenticate the client/user\n* Uses FastSQL databse to store data\n* Returns current rented bikes\n* Add bikes to the database\n* Delete bikes from the database\n" }, { "alpha_fraction": 0.6277372241020203, "alphanum_fraction": 0.6350364685058594, "avg_line_length": 19.04878044128418, "blob_id": "b5c381acf4305446f854a0bd325e7c20fac289d8", "content_id": "6e3bc106633d86a5324de6ebb05ac13e394c269c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 822, "license_type": "no_license", "max_line_length": 74, "num_lines": 41, "path": "/main.py", "repo_name": "hkiepe/fastAPI-rentalguard", "src_encoding": "UTF-8", "text": "from fastapi import FastAPI\nfrom typing import Optional\nfrom pydantic import BaseModel\n\n\napp = FastAPI()\n\n\[email protected]('/blog')\ndef index(limit = 10, published: bool = True, sort: Optional[str] = None):\n # limited amount of blogs\n if published:\n return {'data': f'blog list {limit}'}\n else:\n return {'data': 'all the blogs'}\n\n\[email protected]('/blog/unpublished')\ndef unpublished():\n return {'data': 'all unpublished blogs'}\n\n\[email protected]('/blog/{id}')\ndef show(id: int):\n return {'data': id}\n\n\[email protected]('/blog/{id}/comments')\ndef show(id, limit = 10):\n return {'data': {'comment 1', 'comment 2', limit}}\n\n\nclass Blog(BaseModel):\n title: str\n body: str\n published: Optional[bool]\n\n\[email protected]('/blog')\ndef create_blog(request: Blog):\n return {'data': f'Blog is cretaed with {request.title}'}\n" } ]
2
mstysin/revolutionindex
https://github.com/mstysin/revolutionindex
01dc22fbbdda3f5ed761d631084460137eaddef7
ee324e305aeec9bd59cbe465e57f5e6ab3774aa8
0bab86ff1b90e3e5872a585d102074ffc3675745
refs/heads/master
2016-08-04T07:30:12.894765
2015-09-01T15:56:28
2015-09-01T15:56:28
41,318,824
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7155172228813171, "alphanum_fraction": 0.75, "avg_line_length": 32.28571319580078, "blob_id": "9d616150caf8dbcc0e1f517b55af47c71e76e174", "content_id": "1da4ec426adca8cc2af9e339dcd971b4f13bfa6e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 232, "license_type": "no_license", "max_line_length": 49, "num_lines": 7, "path": "/prices/models.py", "repo_name": "mstysin/revolutionindex", "src_encoding": "UTF-8", "text": "from django.db import models\n\n# Create your models here.\nclass Prices(models.Model):\n\twti_price = models.FloatField(default=\"50.00\")\n\trub_price = models.FloatField(default = \"60.00\")\n\tpub_date = models.DateTimeField('date recorded')" }, { "alpha_fraction": 0.6821862459182739, "alphanum_fraction": 0.7125505805015564, "avg_line_length": 34.35714340209961, "blob_id": "a2f262647d41abe7fa2fcb202bf3e2be79233ad4", "content_id": "b57a69ec4519293133b41c9f3c5715d1d9a6bca1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 494, "license_type": "no_license", "max_line_length": 58, "num_lines": 14, "path": "/coworkings/models.py", "repo_name": "mstysin/revolutionindex", "src_encoding": "UTF-8", "text": "from django.db import models\n\n# Create your models here.\n\nclass Coworkings(models.Model):\n\tname = models.CharField(max_length=50)\n\tname_eng = models.CharField(max_length=50)\n\tgeotag = models.CharField(max_length = 50)\n\trating = models.FloatField(default='8.0')\n\tpicnumber = models.IntegerField(default='0')\n\tprice = models.IntegerField(default='500')\n\tpic_number = models.IntegerField(default='0')\n\tdef __str__(self): # __unicode__ on Python 2\n\t\treturn self.name_eng.decode('utf8')" }, { "alpha_fraction": 0.6860986351966858, "alphanum_fraction": 0.6860986351966858, "avg_line_length": 27, "blob_id": "5eebdfb43d26b05f3b8b708e3f4a9e7294a7fcbf", "content_id": "0c36fa9c21a6c5182e39f6ec2ba4ea29e8fdeb18", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 223, "license_type": "no_license", "max_line_length": 99, "num_lines": 8, "path": "/coworkings/urls.py", "repo_name": "mstysin/revolutionindex", "src_encoding": "UTF-8", "text": "from django.conf.urls import url\n\nfrom . 
import views \n\nurlpatterns = [\n\turl(r'^$', views.coworking, name = 'coworking'),\n\turl(r'^coworking/(?P<coworking_name>\\w+)/$', views.coworking_details, name = 'coworking_details'),\n]" }, { "alpha_fraction": 0.6966824531555176, "alphanum_fraction": 0.7026066184043884, "avg_line_length": 28.10344886779785, "blob_id": "6b3962b1d35a427a5fb72f399c3fa289b0011fbb", "content_id": "ede2f071dff3228233c980d5031a482c5d677bd5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 844, "license_type": "no_license", "max_line_length": 106, "num_lines": 29, "path": "/prices/management/commands/getdata.py", "repo_name": "mstysin/revolutionindex", "src_encoding": "UTF-8", "text": "#real time oil api based on bloomberg\n\nfrom django.core.management.base import BaseCommand, CommandError\nfrom prices.models import Prices\n\nimport urllib2\nimport re\nfrom datetime import datetime\n\nclass Command(BaseCommand):\n\tdef handle(self, *args, **options):\n\n\t\topener = urllib2.build_opener()\n\t\turl = 'http://www.bloomberg.com/quote/CL1:COM'\n\t\tresponse = opener.open(url).readlines()\n\t\tresponse = ''.join(response)\n\n\t\twtiprice = re.findall('price\">(.+?)<', response)\n\t\tprint \"WTI price is\" + str(wtiprice)\n\n\t\turl = 'http://www.bloomberg.com/quote/USDRUB:CUR'\n\t\tresponse = opener.open(url).readlines()\n\t\tresponse = ''.join(response)\n\n\t\trubprice = re.findall('price\">(.+?)<', response)\n\t\tprint \"RUB price is\" + str(rubprice)\n\n\t\tq = Prices.objects.create(pub_date = datetime.today(), wti_price = wtiprice[0], rub_price = rubprice[0])\n\t\tq.save() " }, { "alpha_fraction": 0.77224200963974, "alphanum_fraction": 0.774021327495575, "avg_line_length": 24.590909957885742, "blob_id": "5b93b22ac4cd3644cac7bfc68deb940e618d42f5", "content_id": "15141b1b7f001f00518f95acbf1153e14667e854", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 562, "license_type": "no_license", "max_line_length": 62, "num_lines": 22, "path": "/coworkings/views.py", "repo_name": "mstysin/revolutionindex", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\n\n# Create your views here.\n\n\nfrom coworkings.models import Coworkings\n\ndef coworking(request):\n\tcoworkings = Coworkings.objects.all()\n\tactivecoworking = Coworkings.objects.all()[4]\n\tcontext = {\n\t'coworkings':coworkings,\n\t'activecoworking':activecoworking\n\t}\n\treturn render(request, 'coworkings/coworkings.html', context)\n\ndef coworking_details(request, coworking_name):\n\tcoworking = Coworkings.objects.get(name_eng=coworking_name)\n\tcontext = {\n\t'coworking':coworking\n\t}\n\treturn render(request, 'coworkings/item.html', context)" } ]
5
LongCohol/HerokuKMOUWebsite
https://github.com/LongCohol/HerokuKMOUWebsite
693cd8a5e50aa896b943bce8cd1e381908e9e519
0e82347615f1b7154ee84e22b33d396cc6c19b73
70d3b3be21127984799c79e38bd25f93fe41f59e
refs/heads/master
2023-06-05T06:33:26.456358
2021-06-27T12:14:32
2021-06-27T12:14:32
380,637,903
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.703005313873291, "alphanum_fraction": 0.7147908210754395, "avg_line_length": 44.864864349365234, "blob_id": "1f3bd87250d43619b8a6225f149f1822dc0ca347", "content_id": "aadba4dde2647e9db4d56fb0514fd55191225ad5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1697, "license_type": "no_license", "max_line_length": 120, "num_lines": 37, "path": "/WebsiteCompany_2021_03/urls.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "\"\"\"WebsiteCompany_2021_03 URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/3.2/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\n# from django.contrib import admin\nfrom django.conf import settings\nfrom django.conf.urls.static import static\nfrom django.urls import path\nfrom Operator.views import adminView, frontView, logged_out, staff_in, customer_in, shipment_print, mainView1, mainView2\n# import notifications.urls\nfrom django.conf.urls import url\nfrom django.conf.urls import include\n\nurlpatterns = [\n # path('admin/', admin.site.urls),\n \n path('administration/', adminView, name=\"adminPage\"),\n path('', frontView, name=\"frontpage\"),\n path('staff_in/', staff_in, name=\"staff_redirect\"),\n path('customer_in/', customer_in, name=\"customer_redirect\"),\n # url('^inbox/notifications/', include(notifications.urls, namespace='notifications')),\n path('logged_out/', logged_out, name=\"logged_out\"),\n path('main_page_staff/', mainView1, name=\"mainPage1\"),\n path('main_page_customer/', mainView2, name=\"mainPage2\"),\n path('shipment_printing/', shipment_print, name=\"shipment_print\"),\n] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n" }, { "alpha_fraction": 0.6926271915435791, "alphanum_fraction": 0.6988577246665955, "avg_line_length": 40.869564056396484, "blob_id": "d129b1dd3c3eaea46e8ecb7e0b9a23a92500bc4a", "content_id": "6bd7b250d67b483e9482e5a0af4e8c35b1977933", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 963, "license_type": "no_license", "max_line_length": 105, "num_lines": 23, "path": "/Company/models.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "# from django.conf import settings\n# from django.db import models\n# from django.contrib.auth.models import AbstractBaseUser\n#\n#\n# MAX_LENGTH = 30\n# MAX_VESSEL_LENGTH = 1200\n#\n#\n# class Company(AbstractBaseUser):\n# user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, primary_key=True)\n# companyID = models.CharField(verbose_name=\"Company ID\", max_length=MAX_LENGTH, default=None,\n# unique=True)\n#\n# companyName = models.CharField(verbose_name=\"Company\", max_length=MAX_LENGTH, default=None)\n# companyEmail = models.EmailField(verbose_name=\"Company Email\", max_length=MAX_LENGTH, default=None)\n# vesselList = models.TextField(verbose_name=\"Vessel List\", max_length=MAX_VESSEL_LENGTH)\n# CPNpassword = models.CharField(verbose_name=\"Password\", 
max_length=MAX_LENGTH, default=None)\n#\n# USERNAME_FIELD = 'companyID'\n#\n# class Meta:\n# db_table = \"shipping_company_table\"\n" }, { "alpha_fraction": 0.6589147448539734, "alphanum_fraction": 0.6589147448539734, "avg_line_length": 23.967741012573242, "blob_id": "28b561ae70de079f43676a27e9f62df41b378a3f", "content_id": "1c0e7dffef590b787d1f8e968b949bfb39f3f9c0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 774, "license_type": "no_license", "max_line_length": 66, "num_lines": 31, "path": "/templates/send_notification.php", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "<?php\nfunction sendNotification(){\n\t$url = \"https://fcm.googleapis.com/fcm/send\";\n\n\t$fields = array(\n\t\t\"to\"=>$_REQUEST['token'],\n\t\t\"notification\"=>array(\n\t\t\t\"body\"=>$_REQUEST['message'],\n\t\t\t\"title\"=>$_REQUEST['title'],\n\t\t\t\"icon\"=>$_REQUEST['icon'],\n\t\t\t\"click_action\"=>\"https://google.com\"\n\t\t)\n\t);\n\n\t$headers=array(\n\t\t'Authorization: key=AAAATU0EZtE:APA91bEzkNMDAqlVFuEtQ0nMu1rZc4bM2yWjzjPktyeBVPywZpMNkbIXg0FBJj2alSONHZGllHzSXYRwSUdFN93jfbSRk4MkWHq-mCX9h2DTEy5RU_6kAeZ8I8ChLf59dP3K0MITzQvJ',\n\t\t'Content_Type:application/json',\n\t);\n\n\t$ch=curl_init();\n\tcurl_setopt($ch, option:CURLOPT_URL,$url);\n\tcurl_setopt($ch, option:CURLOPT_POST,value:true);\n\tcurl_setopt($ch, option:CURLOPT_HTTPHEADER,$headers);\n\tcurl_setopt($ch, option:CURLOPT_RETURNTRANSFER,value:true);\n\tcurl_setopt($ch, option:CURLOPT_POSTFIELDS,json_encode($fields));\n\t$result=curl_exec($ch);\n\tprint_r($result);\n\tcurl_close($ch);\n}\nsendNotification();\n?>\t" }, { "alpha_fraction": 0.5804347991943359, "alphanum_fraction": 0.5972825884819031, "avg_line_length": 51.57143020629883, "blob_id": "7de4bf33445b2f06dfeb65e7c7ba07b40f184854", "content_id": "00df7212874ae229c549d6758430abe49e4edec4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1840, "license_type": "no_license", "max_line_length": 190, "num_lines": 35, "path": "/User/migrations/0001_initial.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2 on 2021-06-27 21:12\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Account',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),\n ('userID', models.CharField(default=None, max_length=30, unique=True, verbose_name='Account ID')),\n ('password', models.CharField(default=None, max_length=100, verbose_name='Password')),\n ('rawPassword', models.CharField(default=None, max_length=30, verbose_name='Raw Password')),\n ('email', models.EmailField(blank=True, default='', max_length=30, verbose_name='Email')),\n ('companyName', models.CharField(blank=True, default='', max_length=30, verbose_name='Company')),\n ('vesselList', models.TextField(blank=True, default='', max_length=1200, verbose_name='Vessel List')),\n ('permission', models.CharField(choices=[('Read Only', 'Read Only'), ('Read + Modify', 'Read + Modify')], default='Read + Modify', max_length=30, verbose_name='Permission')),\n ('isOpr', models.BooleanField(default=False, verbose_name='Is Operator')),\n ('isSpl', models.BooleanField(default=False, 
verbose_name='Is Supplier')),\n ('isCpn', models.BooleanField(default=False, verbose_name='Is Company')),\n ('dateSignUp', models.DateField(auto_now=True, verbose_name='Date Signed Up')),\n ],\n options={\n 'db_table': 'account_table',\n },\n ),\n ]\n" }, { "alpha_fraction": 0.5805298686027527, "alphanum_fraction": 0.6049329042434692, "avg_line_length": 59.394737243652344, "blob_id": "d435542a5e7366bf4f5d325cd43e67fea2c5aaa4", "content_id": "26948dff66cb0f2f857868b1b35f9918ac314c04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11522, "license_type": "no_license", "max_line_length": 155, "num_lines": 190, "path": "/shipment_forms.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "from datetime import date\nfrom bootstrap_datepicker_plus import DatePickerInput\nfrom django import forms\nfrom Shipment.models import Shipment\n\n\nCOMPANY = {\n ('', '------------'),\n ('CENTRA', 'CENTRA'),\n ('CMSHIP', 'CMSHIP'),\n ('DAN MO', 'DAN MO'),\n ('DORVAL', 'DORVAL'),\n ('EUCO', 'EUCO'),\n ('FORTUNE WILL', 'FORTUNE WILL'),\n ('GLOVIS', 'GLOVIS'),\n ('GOLTENS', 'GOLTENS'),\n ('GOWIN', 'GOWIN'),\n ('INTERGIS', 'INTERGIS'),\n ('KLCSM', 'KLCSM'),\n ('KNK', 'KNK'),\n ('KSS', 'KSS'),\n ('MAN', 'MAN'),\n ('MARUBISHI', 'MARUBISHI'),\n ('POSSM', 'POSSM'),\n ('SAEHAN', 'SAEHAN'),\n ('SEOYANG', 'SEOYANG'),\n ('SHI OCEAN', 'SHI OCEAN'),\n ('STX', 'STX'),\n ('SUNAMI', 'SUNAMI'),\n ('SUNRIO', 'SUNRIO'),\n ('보성상사', '보성상사'),\n ('오션마린', '오션마린'),\n ('이강공사', '이강공사'),\n}\nWAREHOUSE = {\n ('SL', 'SL'),\n ('KIM-IGS', 'KIM-IGS'),\n ('ICN-IGS', 'ICN-IGS'),\n}\n\nBY = {\n ('DHL', 'DHL'),\n ('FDX', 'FDX'),\n ('TNT', 'TNT'),\n ('AIR', 'AIR'),\n ('SEA', 'SEA'),\n ('SFX', 'SFX'),\n}\nDIVISION = {\n ('', '---------'),\n (\"D\", \"D\"),\n (\"B\", \"B\"),\n (\"L\", \"L\"),\n}\nFLAG = {\n (\"BLANK\", \"BLANK\"),\n (\"STAY\", \"STAY1\"),\n (\"STAY2\", \"STAY2\"),\n (\"START\", \"START\"),\n (\"COMPLETED\", \"COMPLETED\"),\n}\n\nDATE_ERRORS = {\n 'invalid': \"Wrong date format. 
Please check (YYYYMMDD)\",\n}\nBRIEF_DATE_ERRORS = {\n 'invalid': \"Must be YYYYMMDD\",\n}\n\n\nclass ShipmentRegistration(forms.ModelForm):\n in_date = forms.DateField(required=False, initial=date.today(), input_formats=['%Y%m%d'],\n widget=forms.DateInput(format='%Y%m%d', attrs={'placeholder': 'YYYYMMDD'}), label=\"IN\",\n error_messages=DATE_ERRORS)\n out_date = forms.DateField(required=False, input_formats=['%Y%m%d'],\n widget=forms.DateInput(format='%Y%m%d', attrs={'placeholder': 'YYYYMMDD'}), label=\"OUT\",\n error_messages=DATE_ERRORS)\n # division = forms.ChoiceField(required=False, choices=DIVISION, label=\"DIVISION\", initial=\"\")\n\n class Meta:\n model = Shipment\n fields = ('company', 'vessel', 'docs', 'odr', 'supplier', 'quanty', 'unit', 'size', 'weight', 'in_date',\n 'warehouse', 'warehouse2', 'by', 'BLno', 'port', 'out_date', 'remark', 'memo',\n 'job_number', 'image', 'image1', 'image2', 'pdf_file', 'division')\n\n def __init__(self, *args, **kwargs):\n # Set up dimension for fields with CSS style\n super(ShipmentRegistration, self).__init__(*args, **kwargs)\n self.fields['docs'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; margin-top:5px; font-size:12px'\n self.fields['odr'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; margin-top:5px; font-size:12px'\n self.fields['supplier'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; margin-top:5px; font-size:12px'\n self.fields['size'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; margin-top:5px; font-size:12px'\n self.fields['remark'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; margin-top:5px; font-size:12px'\n self.fields['memo'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; margin-top:5px; font-size:12px'\n self.fields['image'].widget.attrs['style'] = 'height:0.7cm; font-size: 12px; margin-bottom:1px; font-size:12px'\n self.fields['image1'].widget.attrs['style'] = 'height:0.7cm; font-size: 12px; margin-bottom:1px; font-size:12px'\n self.fields['image2'].widget.attrs['style'] = 'height:0.7cm; font-size: 12px; margin-bottom:1px; font-size:12px'\n self.fields['pdf_file'].widget.attrs['style'] = 'height:0.7cm; font-size: 12px; margin-bottom:1px; font-size:12px'\n\n self.fields['company'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; font-size:12px; text-align-last: center'\n self.fields['vessel'].widget.attrs['style'] = 'width: 4.7cm; height:0.8cm; font-size:12px; text-align: center'\n self.fields['quanty'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; font-size:12px; text-align: center'\n self.fields['unit'].widget.attrs['style'] = 'width: 3cm; height:0.8cm; font-size:12px; text-align: center'\n self.fields['weight'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; font-size:12px; margin-top:5px; margin-bottom:5px; text-align: center'\n self.fields['in_date'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; font-size:12px; text-align: center'\n self.fields['BLno'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; font-size:12px; text-align: center'\n self.fields['port'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; font-size:12px; text-align: center'\n self.fields['out_date'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; font-size:12px; text-align: center'\n self.fields['job_number'].widget.attrs['style'] = 'width: 5cm; height:0.8cm; font-size:12px; text-align: center'\n self.fields['warehouse'].widget.attrs['style'] = 'width: 3cm; height:0.8cm; font-size:12px; margin-top:5px; margin-bottom:5px; text-align: center'\n self.fields['warehouse2'].widget.attrs['style'] = 'width: 
3cm; height:0.8cm; font-size:12px; margin-top:5px; margin-bottom:5px; text-align: center'\n self.fields['by'].widget.attrs['style'] = 'width: 3cm; height:0.8cm; font-size:12px; margin-top:5px; margin-bottom:5px; text-align: center'\n self.fields['division'].widget.attrs['style'] = 'width: 2cm; height:0.8cm; font-size:12px; text-align-last: center'\n\n\nclass ShipmentModification(forms.ModelForm):\n in_dateM = forms.DateField(required=False, label=\"Date In\", input_formats=['%Y%m%d'],\n widget=forms.DateInput(format='%Y%m%d', attrs={'placeholder': 'YYYYMMDD'}),\n error_messages=BRIEF_DATE_ERRORS)\n out_dateM = forms.DateField(required=False, label=\"Date Out\", input_formats=['%Y%m%d'],\n widget=forms.DateInput(format='%Y%m%d', attrs={'placeholder': 'YYYYMMDD'}),\n error_messages=BRIEF_DATE_ERRORS)\n # in_dateM = forms.DateField(required=False, input_formats=['%Y%m%d'],\n # widget=DatePickerInput(format='%Y%m%d', attrs={'placeholder': 'YYYYMMDD'}), label=\"Date In\",\n # error_messages=BRIEF_DATE_ERRORS)\n # out_dateM = forms.DateField(required=False, input_formats=['%Y%m%d'],\n # widget=DatePickerInput(format='%Y%m%d', attrs={'placeholder': 'YYYYMMDD'}), label=\"Date Out\",\n # error_messages=BRIEF_DATE_ERRORS)\n\n companyM = forms.ChoiceField(required=False, choices=COMPANY, label=\"COMPANY\", initial=\"\")\n vesselM = forms.CharField(required=False, max_length=100, label=\"VESSEL\")\n supplierM = forms.CharField(required=False, max_length=100, label=\"SUPPLIER\")\n warehouseM = forms.CharField(required=False, max_length=100, label=\"WAREHOUSE\")\n warehouse2M = forms.CharField(required=False, max_length=100, label=\"WAREHOUSE2\")\n divisionM = forms.ChoiceField(required=False, choices=DIVISION, label=\"DIVISION\", initial=\"\")\n flag_statusM = forms.ChoiceField(required=False, choices=FLAG, label=\"STATE\", initial=\"BLANK\")\n job_numberM = forms.CharField(required=False, label=\"JOB.NO\")\n portM = forms.CharField(required=False, label=\"PORT\")\n remarkM = forms.CharField(required=False, label=\"REMARK\")\n memoM = forms.CharField(required=False, label=\"MEMO\")\n docsM = forms.CharField(required=False, label=\"DOC\")\n odrM = forms.CharField(required=False, label=\"ODR\")\n quantyM = forms.CharField(required=False, label=\"QTY\")\n unitM = forms.CharField(required=False, label=\"UNIT\")\n sizeM = forms.CharField(required=False, label=\"SIZE\")\n weightM = forms.CharField(required=False, label=\"WEIGHT\")\n BLnoM = forms.CharField(required=False, label=\"BLNO\")\n\n class Meta:\n model = Shipment\n fields = ['companyM', 'vesselM', 'supplierM', 'warehouseM', 'divisionM', 'flag_statusM',\n 'in_dateM', 'out_dateM', 'job_numberM', 'portM', 'remarkM', 'memoM']\n\n def __init__(self, *args, **kwargs):\n # Set up dimension for fields with CSS style\n super(ShipmentModification, self).__init__(*args, **kwargs)\n self.fields['companyM'].widget.attrs['style'] = 'width: 5cm; height: 0.8cm; font-size:12px; text-align-last:center'\n self.fields['vesselM'].widget.attrs['style'] = 'width: 4.7cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['supplierM'].widget.attrs['style'] = 'width: 5cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['warehouseM'].widget.attrs['style'] = 'width: 2.7cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['warehouse2M'].widget.attrs['style'] = 'width: 2.7cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['job_numberM'].widget.attrs['style'] = 'width: 3cm; height: 0.8cm; font-size:12px; 
text-align:center'\n self.fields['divisionM'].widget.attrs['style'] = 'width: 2cm; height: 0.8cm; font-size:12px; text-align-last:center'\n self.fields['flag_statusM'].widget.attrs['style'] = 'width: 3cm; height: 0.8cm; font-size:12px; text-align-last:center'\n self.fields['in_dateM'].widget.attrs['style'] = 'width: 2.5cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['out_dateM'].widget.attrs['style'] = 'width: 2.5cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['portM'].widget.attrs['style'] = 'width: 3.5cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['remarkM'].widget.attrs['style'] = 'width: 4cm; height:0.8cm; font-size:12px; text-align:center'\n self.fields['memoM'].widget.attrs['style'] = 'width: 4.5cm; height:0.8cm; font-size:12px; text-align:center'\n self.fields['docsM'].widget.attrs['style'] = 'width: 5cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['odrM'].widget.attrs['style'] = 'width: 5cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['quantyM'].widget.attrs['style'] = 'width: 5cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['unitM'].widget.attrs['style'] = 'width: 4.7cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['sizeM'].widget.attrs['style'] = 'width: 5cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['weightM'].widget.attrs['style'] = 'width: 5cm; height: 0.8cm; font-size:12px; text-align:center'\n self.fields['BLnoM'].widget.attrs['style'] = 'width: 5cm; height: 0.8cm; font-size:12px; text-align:center'\n\n\nclass WarehouseFilter(forms.ModelForm):\n wh = forms.CharField(required=False, label=\"Warehouse\")\n\n class Meta:\n model = Shipment\n # fields = ['companyF', 'supplierF', 'vesselF', 'warehouseF', 'job_numberF', 'in_date_range']\n fields = ['wh']\n\n def __init__(self, *args, **kwargs):\n # Set up dimension for fields with CSS style\n super(WarehouseFilter, self).__init__(*args, **kwargs)\n self.fields['wh'].widget.attrs['style'] = 'width: 3cm; height: 0.8cm; font-size: 12px; text-align : center'" }, { "alpha_fraction": 0.619714081287384, "alphanum_fraction": 0.6387434601783752, "avg_line_length": 52.9782600402832, "blob_id": "50ba94b68713e6253a79fcd0dcc2a789ccf7fef3", "content_id": "cd6c638e075850306c81e8e5453e63f4571aa99b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9980, "license_type": "no_license", "max_line_length": 156, "num_lines": 184, "path": "/Shipment/models.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "from bootstrap_datepicker_plus import DatePickerInput\nfrom django.utils.timezone import now\nfrom django.db import models\n\nfrom override_existing import OverrideExisting\n\n# For barcode-printer\nimport barcode\nfrom barcode.writer import ImageWriter\nfrom io import BytesIO\nfrom django.core.files import File\n# For filter form\nimport django_filters\nfrom django_filters.widgets import RangeWidget\n\n\nIMAGE_PATH = \"imageShipment/\"\nPDF_PATH = \"pdfShipment/\"\nBARCODE_PATH = \"barcodeShipment/\"\n\nCOMPANY = {\n ('CENTRA', 'CENTRA'),\n ('CMSHIP', 'CMSHIP'),\n ('DAN MO', 'DAN MO'),\n ('DORVAL', 'DORVAL'),\n ('JW', 'JW'),\n ('EUCO', 'EUCO'),\n ('FORTUNE WILL', 'FORTUNE WILL'),\n ('GLOVIS', 'GLOVIS'),\n ('GOLTENS', 'GOLTENS'),\n ('GOWIN', 'GOWIN'),\n ('INTERGIS', 'INTERGIS'),\n ('KLCSM', 'KLCSM'),\n ('KNK', 'KNK'),\n ('KSS', 'KSS'),\n ('MAN', 'MAN'),\n ('MARUBISHI', 'MARUBISHI'),\n ('POSSM', 
'POSSM'),\n ('SAEHAN', 'SAEHAN'),\n ('SEOYANG', 'SEOYANG'),\n ('SHI OCEAN', 'SHI OCEAN'),\n ('STX', 'STX'),\n ('SUNAMI', 'SUNAMI'),\n ('SUNRIO', 'SUNRIO'),\n ('보성상사', '보성상사'),\n ('오션마린', '오션마린'),\n ('이강공사', '이강공사'),\n}\nWAREHOUSE = {\n ('SL', 'SL'),\n ('KIM-IGS', 'KIM-IGS'),\n ('ICN-IGS', 'ICN-IGS'),\n}\n\nBY = {\n ('DHL', 'DHL'),\n ('FDX', 'FDX'),\n ('TNT', 'TNT'),\n ('AIR', 'AIR'),\n ('SEA', 'SEA'),\n ('SFX', 'SFX'),\n}\nDIVISION = {\n (\"D\", \"D\"),\n (\"B\", \"B\"),\n (\"L\", \"L\"),\n}\nFLAG = {\n (\"BLANK\", \"BLANK\"),\n (\"STAY1\", \"STAY1\"),\n (\"STAY2\", \"STAY2\"),\n (\"START\", \"START\"),\n (\"COMPLETED\", \"COMPLETED\"),\n}\n\n\ndef image_path(instance, filename):\n return IMAGE_PATH + '/{0}_{1}_{2}/{3}'.format(instance.company, instance.vessel, instance.in_date, filename)\n\n\ndef pdf_path(instance, filename):\n return PDF_PATH + '/{0}_{1}_{2}/{3}'.format(instance.company, instance.vessel, instance.in_date, filename)\n\n\ndef barcode_path(instance, filename):\n return BARCODE_PATH + '/{0}_{1}_{2}/{3}'.format(instance.company, instance.vessel, instance.in_date, filename)\n\n\nclass Shipment(models.Model):\n number = models.BigAutoField(primary_key=True, db_column=\"no\")\n barcode = models.ImageField(upload_to=barcode_path, db_column=\"barcode\", blank=True, verbose_name=\"Barcode Shipment\",\n storage=OverrideExisting())\n colorpick = models.CharField(blank=True, db_column=\"color_status\", max_length=10, default=\"\")\n\n kantor_id = models.CharField(blank=True, db_column=\"kantor_id\", max_length=40)\n insert_org = models.CharField(blank=True, db_column=\"insert_org\", max_length=100)\n correct_org = models.CharField(blank=True, db_column=\"correct_org\", max_length=100)\n reg_date = models.DateTimeField(auto_now=True, db_column=\"regdate\", max_length=20)\n company = models.CharField(blank=True, db_column=\"company\", choices=COMPANY, max_length=100, verbose_name=\"COMPANY\")\n vessel = models.CharField(blank=True, db_column=\"vessel\", max_length=100, verbose_name=\"VESSEL\")\n by = models.CharField(blank=True, db_column=\"by1\", max_length=50, verbose_name=\"BY\")\n BLno = models.CharField(blank=True, db_column=\"blno\", max_length=50, verbose_name=\"BLNO\")\n docs = models.TextField(blank=True, db_column=\"doc\", max_length=500, verbose_name=\"DOC\")\n odr = models.TextField(blank=True, db_column=\"odr\", max_length=100, verbose_name=\"ODR\")\n supplier = models.TextField(blank=True, db_column=\"supplier\", max_length=100, verbose_name=\"SUPPLIER\")\n quanty = models.CharField(blank=True, db_column=\"qty\", max_length=10, verbose_name=\"QTY\")\n unit = models.CharField(blank=True, db_column=\"unit\", max_length=10, verbose_name=\"UNIT\")\n size = models.TextField(blank=True, db_column=\"size\", max_length=100, verbose_name=\"SIZE\")\n weight = models.CharField(blank=True, db_column=\"weight\", max_length=10, verbose_name=\"WEIGHT\")\n in_date = models.DateField(blank=True, null=True, db_column=\"in1\", max_length=10, verbose_name=\"IN\", default=now)\n warehouse = models.CharField(blank=True, db_column=\"whouse\", max_length=100, verbose_name=\"W/H1\")\n warehouse2 = models.CharField(blank=True, db_column=\"whouse2\", max_length=100, verbose_name=\"W/H2\")\n warehouse_lastupdate = models.CharField(blank=True, db_column=\"whouse_lastupdate\", max_length=100, default=\"\", verbose_name=\"W/H\")\n warehouse2 = models.CharField(blank=True, db_column=\"whouse2\", max_length=100, verbose_name=\"W/H2\")\n wh_timestamp = models.DateTimeField(blank=True, null=True, 
db_column=\"wh_timestamp1\", max_length=20)\n wh_timestamp2 = models.DateTimeField(blank=True, null=True, db_column=\"wh_timestamp2\", max_length=20)\n port = models.CharField(blank=True, db_column=\"port\", max_length=100, verbose_name=\"PORT\")\n out_date = models.DateField(blank=True, null=True, db_column=\"out1\", max_length=10, verbose_name=\"OUT\")\n remark = models.TextField(blank=True, db_column=\"remark\", max_length=500, verbose_name=\"REMARK\")\n memo = models.TextField(blank=True, db_column=\"memo\", max_length=1000, verbose_name=\"MEMO\")\n image = models.ImageField(upload_to=image_path, db_column=\"img\", blank=True, null=True, verbose_name=\"IMG\",\n storage=OverrideExisting(), default='', max_length=500)\n image1 = models.ImageField(upload_to=image_path, db_column=\"img1\", blank=True, null=True,\n storage=OverrideExisting(), default='', max_length=500)\n image2 = models.ImageField(upload_to=image_path, db_column=\"img2\", blank=True, null=True,\n storage=OverrideExisting(), default='', max_length=500)\n pdf_file = models.FileField(upload_to=pdf_path, db_column=\"pdf\", blank=True, null=True, verbose_name=\"PDF\",\n storage=OverrideExisting(), default='', max_length=500)\n division = models.CharField(blank=True, db_column=\"division\", max_length=10, choices=DIVISION, verbose_name=\"DIVISION\")\n flag_status = models.CharField(blank=True, db_column=\"flg\", max_length=10, choices=FLAG, verbose_name=\"STATE\")\n job_number = models.CharField(blank=True, db_column=\"jobno\", max_length=50, verbose_name=\"JOB.NO\")\n work = models.CharField(blank=True, db_column=\"work\", max_length=10)\n work_regdate = models.DateTimeField(blank=True, null=True, db_column=\"work_regdate\", max_length=20)\n\n def __str__(self):\n return self.number\n\n class Meta:\n db_table = \"pla_databoard\"\n\n def save(self, *args, **kwargs): # overriding save()\n # code_type = barcode.get_barcode_class('code128')\n # byte = BytesIO()\n # if self.odr is None:\n # code = code_type('{0}'.format(0000000000), writer=ImageWriter()).write(byte)\n # else:\n # code = code_type('{0}'.format(self.odr), writer=ImageWriter()).write(byte)\n #\n # self.barcode.save(f'{self.odr}.png', File(byte), save=False)\n return super().save(*args, **kwargs)\n\n\nclass ShipmentFilter(django_filters.FilterSet):\n # company = django_filters.CharFilter(label=\"Company\", lookup_expr=\"icontains\")\n company = django_filters.ChoiceFilter(label=\"Company\", choices=COMPANY)\n supplier = django_filters.CharFilter(label=\"Supplier\", lookup_expr=\"icontains\")\n vessel = django_filters.CharFilter(label=\"Vessel\", lookup_expr=\"icontains\")\n warehouse = django_filters.CharFilter(label=\"Warehouse\", lookup_expr=\"icontains\")\n job_number = django_filters.CharFilter(label=\"JOB.NO\", lookup_expr=\"icontains\")\n division = django_filters.ChoiceFilter(label=\"Division\", choices=DIVISION)\n flag_status = django_filters.ChoiceFilter(label=\"State\", choices=FLAG)\n odr = django_filters.CharFilter(label=\"ODR NO\", lookup_expr=\"icontains\")\n in_date_range = django_filters.DateFromToRangeFilter(field_name='in_date', widget=RangeWidget(attrs={'placeholder': 'YYYYMMDD'}))\n # in_date_range = django_filters.DateFromToRangeFilter(field_name='in_date', widget=DatePickerInput(format='%Y%m%d', attrs={'placeholder': 'YYYYMMDD'}))\n\n class Meta:\n model = Shipment\n # fields = ['companyF', 'supplierF', 'vesselF', 'warehouseF', 'job_numberF', 'in_date_range']\n fields = ['company', 'supplier', 'vessel', 'warehouse', 'job_number', 'in_date_range', 
'division', 'flag_status', 'odr']\n\n def __init__(self, *args, **kwargs):\n # Set up dimension for fields with CSS style\n super(ShipmentFilter, self).__init__(*args, **kwargs)\n self.form.fields['in_date_range'].fields[0].input_formats = ['%Y%m%d']\n self.form.fields['in_date_range'].fields[1].input_formats = ['%Y%m%d']\n self.form.fields['in_date_range'].widget.attrs['style'] = 'width: 2cm; height: 0.8cm; font-size: 12px; text-align: center'\n self.form.fields['company'].widget.attrs['style'] = 'width: 3.5cm; height: 0.8cm; font-size: 12px; text-align-last: center'\n self.form.fields['supplier'].widget.attrs['style'] = 'width: 3cm; height: 0.8cm; font-size: 12px; text-align: center'\n self.form.fields['vessel'].widget.attrs['style'] = 'width: 4cm; height: 0.8cm; font-size: 12px; text-align: center'\n self.form.fields['odr'].widget.attrs['style'] = 'width: 3cm; height: 0.8cm; font-size: 12px; text-align: center'\n self.form.fields['warehouse'].widget.attrs['style'] = 'width: 3cm; height: 0.8cm; font-size: 12px; text-align : center'\n self.form.fields['job_number'].widget.attrs['style'] = 'width: 3cm; height: 0.8cm; font-size: 12px; text-align: center'\n self.form.fields['division'].widget.attrs['style'] = 'width: 2.5cm; height: 0.8cm; font-size: 12px; text-align-last: center'\n self.form.fields['flag_status'].widget.attrs['style'] = 'width: 3cm; height: 0.8cm; font-size: 12px; text-align-last: center'\n" }, { "alpha_fraction": 0.6845930218696594, "alphanum_fraction": 0.6860465407371521, "avg_line_length": 30.227272033691406, "blob_id": "f217449d81fa3319c3d907815eeaeb8dfea5402f", "content_id": "4c670d9ddfcfbd5c366b2c2df879a1a4f141ff99", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 688, "license_type": "no_license", "max_line_length": 86, "num_lines": 22, "path": "/override_existing.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "import os\nfrom django.core.files.storage import FileSystemStorage\n\n\nclass OverrideExisting(FileSystemStorage):\n \"\"\"\n FileSystemStorage subclass that allows overwrite the already existing\n files.\n\n Be careful using this class, as user-uploaded files will overwrite\n already existing files.\n \"\"\"\n\n # The combination that don't makes os.open() raise OSError if the\n # file already exists before it's opened.\n OS_OPEN_FLAGS = os.O_WRONLY | os.O_TRUNC | os.O_CREAT | getattr(os, 'O_BINARY', 0)\n\n def get_available_name(self, name, max_length=None):\n \"\"\"\n This method will be called before starting the save process.\n \"\"\"\n return name\n\n" }, { "alpha_fraction": 0.5099009871482849, "alphanum_fraction": 0.7049505114555359, "avg_line_length": 17.035715103149414, "blob_id": "8308977f9b63b1f180d61e07244a1124bfcbeef9", "content_id": "cb54963631ca64b9f763ef677f2b768f52e930f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1010, "license_type": "no_license", "max_line_length": 39, "num_lines": 56, "path": "/requirements.txt", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": 
"asgiref==3.3.4\nattrs==21.2.0\nautobahn==21.3.1\nAutomat==20.2.0\nbeautifulsoup4==4.9.3\ncertifi==2021.5.30\ncffi==1.14.5\nchannels==3.0.3\nchardet==4.0.0\nconstantly==15.1.0\ncryptography==3.4.7\ndaphne==3.0.2\ndj-database-url==0.5.0\nDjango==3.2\ndjango-bootstrap-datepicker-plus==3.0.5\ndjango-bootstrap4==3.0.1\ndjango-filter==2.4.0\ndjango-heroku==0.3.1\ndjango-user-agents==0.4.0\ndjango-webpush==0.3.3\ngunicorn==20.1.0\nhttp-ece==1.1.0\nhyperlink==21.0.0\nidna==2.10\nimportlib-metadata==2.1.1\nincremental==21.3.0\ninstall==1.3.4\nmysqlclient==2.0.3\nPillow==8.2.0\npsycopg2==2.9.1\npy-vapid==1.8.2\npyasn1==0.4.8\npyasn1-modules==0.2.8\npycparser==2.20\npyOpenSSL==20.0.1\npython-barcode==0.13.1\npytz==2021.1\npywebpush==1.9.4\nPyYAML==5.4.1\nrequests==2.25.1\nschedule==1.1.0\nservice-identity==21.1.0\nsix==1.16.0\nsoupsieve==2.2.1\nsqlparse==0.4.1\nTwisted==21.2.0\ntxaio==21.2.1\ntyping-extensions==3.7.4.3\nua-parser==0.10.0\nurllib3==1.26.5\nuser-agents==2.2.0\nwhitenoise==5.2.0\nXlsxWriter==1.4.0\nxlwt==1.3.0\nzipp==3.4.1\nzope.interface==5.4.0\n" }, { "alpha_fraction": 0.6961815357208252, "alphanum_fraction": 0.6978417038917542, "avg_line_length": 26.799999237060547, "blob_id": "e2e2b8355d6c3e460b6f0890e92734377596c8f2", "content_id": "3a450e19a34dc6bed0d53d208efffc7222e754bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1807, "license_type": "no_license", "max_line_length": 86, "num_lines": 65, "path": "/Shipment/views.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "from django.core.paginator import Paginator\n\nfrom Shipment.models import Shipment, ShipmentFilter\nfrom shipment_forms import ShipmentRegistration, ShipmentModification, WarehouseFilter\n\n\nRESULT_PER_PAGE = 100\n\n\ndef shipmentRegisterView(request):\n # IF this is POST request:\n if request.method == \"POST\":\n formShipmentRegister = ShipmentRegistration(request.POST, request.FILES)\n # ELSE when this is GET request:\n else:\n formShipmentRegister = ShipmentRegistration()\n\n result = {\n \"shipmentRegister\": formShipmentRegister,\n }\n return result\n\n\ndef shipmentFilterView(request):\n shipmentList = Shipment.objects.all().order_by(\"-number\")\n formShipmentFilter = ShipmentFilter(request.GET, queryset=shipmentList)\n warehouseFilter = request.GET.get('wh')\n\n paginator = Paginator(formShipmentFilter.qs, RESULT_PER_PAGE)\n page = request.GET.get('page')\n pagination = paginator.get_page(page)\n result = {\n \"shipmentFilter\": formShipmentFilter,\n \"shipmentDisplay\": pagination,\n \"shipmentResults\": formShipmentFilter.qs,\n }\n return result\n\n\ndef warehouseFilterView(request):\n # IF this is POST request:\n if request.method == \"POST\":\n warehouseFilter = WarehouseFilter(request.POST)\n # ELSE when this is GET request:\n else:\n warehouseFilter = WarehouseFilter(request.GET)\n\n result = {\n \"warehouseFilter\": warehouseFilter,\n }\n return result\n\n\ndef shipmentModifyView(request):\n # IF this is POST request:\n if request.method == \"POST\":\n formShipmentModify = ShipmentModification(request.POST)\n # ELSE when this is GET request:\n else:\n formShipmentModify = ShipmentModification()\n\n result = {\n \"shipmentModify\": formShipmentModify,\n }\n return result\n" }, { "alpha_fraction": 0.6726707816123962, "alphanum_fraction": 0.678260862827301, "avg_line_length": 38.26829147338867, "blob_id": "2ee9e276a0262c147d6d1aa02a99103c1770441f", "content_id": "54eedfe27b7cd6e8115c76f537def54266c155db", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1610, "license_type": "no_license", "max_line_length": 115, "num_lines": 41, "path": "/User/models.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.contrib.auth.models import AbstractBaseUser, BaseUserManager\n\n\nPERMISSION = {\n (\"Read Only\", \"Read Only\"),\n (\"Read + Modify\", \"Read + Modify\"),\n}\n# ROLE = {\n# (\"Operator\", \"Operator\"),\n# (\"Supplier\", \"Supplier\"),\n# (\"Company\", \"Company\"),\n# }\n\nMAX_LENGTH = 30\nMAX_VESSEL_LENGTH = 1200\n\n\nclass Account(AbstractBaseUser):\n userID = models.CharField(verbose_name=\"Account ID\", max_length=MAX_LENGTH, default=None,\n unique=True)\n\n password = models.CharField(verbose_name=\"Password\", max_length=100, default=None)\n rawPassword = models.CharField(verbose_name=\"Raw Password\", max_length=MAX_LENGTH, default=None)\n email = models.EmailField(verbose_name=\"Email\", max_length=MAX_LENGTH, default=\"\", blank=True)\n companyName = models.CharField(verbose_name=\"Company\", max_length=MAX_LENGTH, default=\"\", blank=True)\n vesselList = models.TextField(verbose_name=\"Vessel List\", max_length=MAX_VESSEL_LENGTH, default=\"\", blank=True)\n\n permission = models.CharField(verbose_name=\"Permission\", max_length=MAX_LENGTH, default=\"Read + Modify\",\n choices=PERMISSION)\n isOpr = models.BooleanField(verbose_name=\"Is Operator\", default=False)\n isSpl = models.BooleanField(verbose_name=\"Is Supplier\", default=False)\n isCpn = models.BooleanField(verbose_name=\"Is Company\", default=False)\n dateSignUp = models.DateField(verbose_name=\"Date Signed Up\", auto_now=True)\n\n objects = BaseUserManager()\n\n USERNAME_FIELD = 'userID'\n\n class Meta:\n db_table = \"account_table\"\n" }, { "alpha_fraction": 0.5950930714607239, "alphanum_fraction": 0.6155668497085571, "avg_line_length": 94.32257843017578, "blob_id": "c7db0c3ed1a90f40d7b254bb89c94c3c62eee791", "content_id": "d07b8d3c9652dd2bce13d59601f5bb0ceeaa9d77", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5958, "license_type": "no_license", "max_line_length": 679, "num_lines": 62, "path": "/Shipment/migrations/0001_initial.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2 on 2021-06-27 21:12\n\nimport Shipment.models\nfrom django.db import migrations, models\nimport django.utils.timezone\nimport override_existing\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Shipment',\n fields=[\n ('number', models.BigAutoField(db_column='no', primary_key=True, serialize=False)),\n ('barcode', models.ImageField(blank=True, db_column='barcode', storage=override_existing.OverrideExisting(), upload_to=Shipment.models.barcode_path, verbose_name='Barcode Shipment')),\n ('colorpick', models.CharField(blank=True, db_column='color_status', default='', max_length=10)),\n ('kantor_id', models.CharField(blank=True, db_column='kantor_id', max_length=40)),\n ('insert_org', models.CharField(blank=True, db_column='insert_org', max_length=100)),\n ('correct_org', models.CharField(blank=True, db_column='correct_org', max_length=100)),\n ('reg_date', models.DateTimeField(auto_now=True, db_column='regdate', max_length=20)),\n ('company', models.CharField(blank=True, choices=[('SEOYANG', 'SEOYANG'), ('SUNAMI', 
'SUNAMI'), ('MAN', 'MAN'), ('CENTRA', 'CENTRA'), ('CMSHIP', 'CMSHIP'), ('보성상사', '보성상사'), ('POSSM', 'POSSM'), ('DORVAL', 'DORVAL'), ('GLOVIS', 'GLOVIS'), ('MARUBISHI', 'MARUBISHI'), ('이강공사', '이강공사'), ('DAN MO', 'DAN MO'), ('EUCO', 'EUCO'), ('KNK', 'KNK'), ('KSS', 'KSS'), ('SUNRIO', 'SUNRIO'), ('FORTUNE WILL', 'FORTUNE WILL'), ('GOLTENS', 'GOLTENS'), ('SHI OCEAN', 'SHI OCEAN'), ('SAEHAN', 'SAEHAN'), ('JW', 'JW'), ('INTERGIS', 'INTERGIS'), ('KLCSM', 'KLCSM'), ('오션마린', '오션마린'), ('STX', 'STX'), ('GOWIN', 'GOWIN')], db_column='company', max_length=100, verbose_name='COMPANY')),\n ('vessel', models.CharField(blank=True, db_column='vessel', max_length=100, verbose_name='VESSEL')),\n ('by', models.CharField(blank=True, db_column='by1', max_length=50, verbose_name='BY')),\n ('BLno', models.CharField(blank=True, db_column='blno', max_length=50, verbose_name='BLNO')),\n ('docs', models.TextField(blank=True, db_column='doc', max_length=500, verbose_name='DOC')),\n ('odr', models.TextField(blank=True, db_column='odr', max_length=100, verbose_name='ODR')),\n ('supplier', models.TextField(blank=True, db_column='supplier', max_length=100, verbose_name='SUPPLIER')),\n ('quanty', models.CharField(blank=True, db_column='qty', max_length=10, verbose_name='QTY')),\n ('unit', models.CharField(blank=True, db_column='unit', max_length=10, verbose_name='UNIT')),\n ('size', models.TextField(blank=True, db_column='size', max_length=100, verbose_name='SIZE')),\n ('weight', models.CharField(blank=True, db_column='weight', max_length=10, verbose_name='WEIGHT')),\n ('in_date', models.DateField(blank=True, db_column='in1', default=django.utils.timezone.now, max_length=10, null=True, verbose_name='IN')),\n ('warehouse', models.CharField(blank=True, db_column='whouse', max_length=100, verbose_name='W/H1')),\n ('warehouse_lastupdate', models.CharField(blank=True, db_column='whouse_lastupdate', default='', max_length=100, verbose_name='W/H')),\n ('warehouse2', models.CharField(blank=True, db_column='whouse2', max_length=100, verbose_name='W/H2')),\n ('wh_timestamp', models.DateTimeField(blank=True, db_column='wh_timestamp1', max_length=20, null=True)),\n ('wh_timestamp2', models.DateTimeField(blank=True, db_column='wh_timestamp2', max_length=20, null=True)),\n ('port', models.CharField(blank=True, db_column='port', max_length=100, verbose_name='PORT')),\n ('out_date', models.DateField(blank=True, db_column='out1', max_length=10, null=True, verbose_name='OUT')),\n ('remark', models.TextField(blank=True, db_column='remark', max_length=500, verbose_name='REMARK')),\n ('memo', models.TextField(blank=True, db_column='memo', max_length=1000, verbose_name='MEMO')),\n ('image', models.ImageField(blank=True, db_column='img', default='', max_length=500, null=True, storage=override_existing.OverrideExisting(), upload_to=Shipment.models.image_path, verbose_name='IMG')),\n ('image1', models.ImageField(blank=True, db_column='img1', default='', max_length=500, null=True, storage=override_existing.OverrideExisting(), upload_to=Shipment.models.image_path)),\n ('image2', models.ImageField(blank=True, db_column='img2', default='', max_length=500, null=True, storage=override_existing.OverrideExisting(), upload_to=Shipment.models.image_path)),\n ('pdf_file', models.FileField(blank=True, db_column='pdf', default='', max_length=500, null=True, storage=override_existing.OverrideExisting(), upload_to=Shipment.models.pdf_path, verbose_name='PDF')),\n ('division', models.CharField(blank=True, choices=[('B', 'B'), ('L', 'L'), ('D', 'D')], 
db_column='division', max_length=10, verbose_name='DIVISION')),\n ('flag_status', models.CharField(blank=True, choices=[('STAY2', 'STAY2'), ('COMPLETED', 'COMPLETED'), ('BLANK', 'BLANK'), ('STAY1', 'STAY1'), ('START', 'START')], db_column='flg', max_length=10, verbose_name='STATE')),\n ('job_number', models.CharField(blank=True, db_column='jobno', max_length=50, verbose_name='JOB.NO')),\n ('work', models.CharField(blank=True, db_column='work', max_length=10)),\n ('work_regdate', models.DateTimeField(blank=True, db_column='work_regdate', max_length=20, null=True)),\n ],\n options={\n 'db_table': 'pla_databoard',\n },\n ),\n ]\n" }, { "alpha_fraction": 0.6825612783432007, "alphanum_fraction": 0.6852861046791077, "avg_line_length": 35.70000076293945, "blob_id": "932a7a95519617b780892da1c2c08c1e30e9082a", "content_id": "155fbffa8963d14f8186e3281e367fb3ed51b0ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 734, "license_type": "no_license", "max_line_length": 100, "num_lines": 20, "path": "/Supplier/models.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "# from django.conf import settings\n# from django.db import models\n# from django.contrib.auth.models import AbstractBaseUser\n#\n#\n# MAX_LENGTH = 30\n#\n#\n# class Supplier(AbstractBaseUser):\n# user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, primary_key=True)\n# supplierID = models.CharField(verbose_name=\"Supplier ID\", max_length=MAX_LENGTH, default=None,\n# unique=True)\n#\n# supplierName = models.CharField(verbose_name=\"Supplier\", max_length=MAX_LENGTH, default=None)\n# SPLpassword = models.CharField(verbose_name=\"Password\", max_length=MAX_LENGTH, default=None)\n#\n# USERNAME_FIELD = 'supplierID'\n#\n# class Meta:\n# db_table = \"supplier_table\"\n" }, { "alpha_fraction": 0.5822927951812744, "alphanum_fraction": 0.5899423956871033, "avg_line_length": 44.83490753173828, "blob_id": "ad278de4112ed591a5fa80a1f0616df159728cd5", "content_id": "d432822ebec43022be4b573e045a4145a167e09a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 29152, "license_type": "no_license", "max_line_length": 138, "num_lines": 636, "path": "/Operator/views.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "import xlwt\nfrom PIL import Image\nfrom django.conf import settings\nfrom django.contrib import messages\n\nfrom django.contrib.auth.hashers import make_password\nfrom django.http import HttpResponse\nfrom django.shortcuts import render, redirect\nfrom django.core.paginator import Paginator\nfrom django.contrib.auth import login, logout, authenticate\nfrom django_user_agents.utils import get_user_agent\n\nfrom django.core import serializers\nfrom django.core.serializers import json\nimport json\n\nfrom Shipment.views import shipmentRegisterView, shipmentFilterView, warehouseFilterView, shipmentModifyView\nfrom User.models import Account\nfrom Shipment.models import ShipmentFilter, Shipment\n# from Operator.models import Operator\n# from Supplier.models import Supplier\n# from Company.models import Company\nfrom account_forms import OperatorForm, SupplierForm, CompanyForm, OperatorLogin, CustomerLogin\nfrom shipment_forms import ShipmentRegistration, ShipmentModification, WarehouseFilter\n\nfrom datetime import datetime\n\n\nCONTEXT = {}\n\nUNIT_LIST = ['Select', 'CT', 'PL', 'WC', 'PKG']\nWAREHOUSE_LIST = ['Select', 'SL', 'KIM-IGS', 
'ICN-IGS']\nBY_LIST = ['Select', 'DHL', 'FDX', 'TNT', 'AIR', 'SEA', 'SFX']\n\nRESULT_PER_PAGE = 100\nVESSEL_KEY = {}\n# accounts = Account.objects.all()\n# shipments = Shipment.objects.all()\nCOMP_LIST = []\nVESS_LIST = []\n# for account in accounts:\n# if account.vesselList is not \"\":\n# vesselowned = []\n# companyOwner = account.companyName\n# COMP_LIST.append(companyOwner)\n# vesselOwned = account.vesselList.split(',')\n# for vessel in vesselOwned:\n# vesselowned.append(vessel)\n# VESS_LIST.append(vessel)\n# VESSEL_KEY[vessel] = companyOwner\n# CONTEXT[vessel] = companyOwner\n\n# CONTEXT[\"allCompanies\"] = COMP_LIST\n# CONTEXT[\"allVessels\"] = VESS_LIST\n# CONTEXT[\"allBys\"] = BY_LIST\n# CONTEXT[\"allUnits\"] = UNIT_LIST\n# CONTEXT[\"allWarehouses\"] = WAREHOUSE_LIST\n# CONTEXT[\"totalShipments\"] = len(shipments)\nVESSEL_KEY_JS = json.dumps(CONTEXT)\n# CONTEXT[\"vessel_key\"] = VESSEL_KEY_JS\n\n\ndef adminView(request):\n oprForm = OperatorForm()\n splForm = SupplierForm()\n cpnForm = CompanyForm()\n\n # IF this is POST request:\n if request.method == \"POST\":\n oprID = request.POST.get('oprID')\n splID = request.POST.get('splID')\n cpnID = request.POST.get('cpnID')\n oprNumber = len(Account.objects.filter(userID__exact=oprID))\n splNumber = len(Account.objects.filter(userID__exact=splID))\n cpnNumber = len(Account.objects.filter(userID__exact=cpnID))\n\n # oprID = request.POST.get('operatorID')\n # splID = request.POST.get('supplierID')\n # cpnID = request.POST.get('companyID')\n\n if 'saveOperator' in request.POST:\n oprForm = OperatorForm(request.POST)\n\n if oprForm.is_valid() and oprNumber<1:\n oprForm.save()\n else:\n opr = Account.objects.get(userID__exact=oprID)\n # opr.userID = oprID\n opr.password = request.POST['oprPassword']\n opr.permission = request.POST['oprPermission']\n opr.isOpr = True\n opr.rawPassword = opr.password\n opr.password = make_password(opr.password)\n opr.save()\n # opr = Operator.objects.get(operatorID__exact=oprID)\n # opr.OPRpassword = request.POST['OPRpassword']\n # opr.permission = request.POST['permission']\n # opr.save()\n oprForm = OperatorForm()\n\n if 'saveSupplier' in request.POST:\n splForm = SupplierForm(request.POST)\n\n if splForm.is_valid() and splNumber<1:\n splForm.save()\n else:\n spl = Account.objects.get(userID__exact=splID)\n # spl.userID = splID\n spl.password = request.POST['splPassword']\n spl.companyName = request.POST['splName']\n spl.isSpl = True\n spl.rawPassword = spl.password\n spl.password = make_password(spl.password)\n spl.save()\n # spl = Supplier.objects.get(supplierID__exact=splID)\n # spl.supplierName = request.POST['supplierName']\n # spl.SPLpassword = request.POST['SPLpassword']\n # spl.save()\n splForm = SupplierForm()\n\n if 'saveCompany' in request.POST:\n cpnForm = CompanyForm(request.POST)\n\n if cpnForm.is_valid() and cpnNumber<1:\n cpnForm.save()\n else:\n cpn = Account.objects.get(userID__exact=cpnID)\n # cpn.userID = cpnID\n cpn.password = request.POST['cpnPassword']\n cpn.email = request.POST['cpnEmail']\n cpn.companyName = request.POST['cpnName']\n cpn.vesselList = request.POST['cpnVesselList']\n cpn.isCpn = True\n cpn.rawPassword = cpn.password\n cpn.password = make_password(cpn.password)\n cpn.save()\n # cpn = Company.objects.get(companyID__exact=cpnID)\n # cpn.companyName = request.POST['companyName']\n # cpn.companyEmail = request.POST['companyEmail']\n # cpn.CPNpassword = request.POST['CPNpassword']\n # cpn.vesselList = request.POST['vesselList']\n # cpn.save()\n cpnForm = CompanyForm()\n\n if 
'deleteOperator' in request.POST:\n Account.objects.filter(userID__exact=oprID).delete()\n if 'deleteSupplier' in request.POST:\n Account.objects.filter(userID__exact=splID).delete()\n if 'deleteCompany' in request.POST:\n Account.objects.filter(userID__exact=cpnID).delete()\n\n # if 'deleteOperator' in request.POST:\n # Operator.objects.filter(operatorID__exact=oprID).delete()\n # if 'deleteSupplier' in request.POST:\n # Supplier.objects.filter(supplierID__exact=splID).delete()\n # if 'deleteCompany' in request.POST:\n # Company.objects.filter(companyID__exact=cpnID).delete()\n # ELSE when this is GET request:\n else:\n oprForm = OperatorForm()\n splForm = SupplierForm()\n cpnForm = CompanyForm()\n\n operators = Account.objects.filter(isOpr=True)\n suppliers = Account.objects.filter(isSpl=True)\n companies = Account.objects.filter(isCpn=True)\n oprs = serializers.serialize(\"json\", operators)\n spls = serializers.serialize(\"json\", suppliers)\n cpns = serializers.serialize(\"json\", companies)\n CONTEXT['operatorList'] = oprs\n CONTEXT['supplierList'] = spls\n CONTEXT['companyList'] = cpns\n\n # oprList = serializers.serialize(\"json\", Operator.objects.all())\n # splList = serializers.serialize(\"json\", Supplier.objects.all())\n # cpnList = serializers.serialize(\"json\", Company.objects.all())\n # CONTEXT['operatorList'] = oprList\n # CONTEXT['supplierList'] = splList\n # CONTEXT['companyList'] = cpnList\n\n CONTEXT['operatorForm'] = oprForm\n CONTEXT['supplierForm'] = splForm\n CONTEXT['companyForm'] = cpnForm\n\n return render(request, \"adminPage.html\", CONTEXT)\n\n\ndef logged_out(request):\n logout(request)\n\n return redirect('frontpage')\n\n\ndef staff_in(request):\n CONTEXT[\"staffExist\"] = request.user\n return render(request, \"redirect1.html\", CONTEXT)\n\n\ndef customer_in(request):\n CONTEXT[\"customerExist\"] = request.user\n return render(request, \"redirect2.html\", CONTEXT)\n\n\ndef frontView(request):\n # staffloginview_result = staffLoginView(request)\n # customerloginview_result = customerLoginView(request)\n # CONTEXT.update(staffloginview_result)\n # CONTEXT.update(customerloginview_result)\n\n if request.method == \"POST\":\n # check the button of staffLogin.html\n if \"staffloginform\" in request.POST:\n formOperatorLogin = OperatorLogin(request.POST)\n if formOperatorLogin.is_valid():\n username = formOperatorLogin.cleaned_data.get('userIDstaff')\n password = formOperatorLogin.cleaned_data.get('passwordstaff')\n accountstaff = authenticate(request, userID=username, password=password)\n # if there exists an account with the authentication\n if accountstaff is not None:\n login(request, accountstaff)\n return redirect('staff_redirect')\n\n if \"customerloginform\" in request.POST:\n formCustomerLogin = CustomerLogin(request.POST)\n if formCustomerLogin.is_valid():\n username = formCustomerLogin.cleaned_data.get('userIDcustomer')\n password = formCustomerLogin.cleaned_data.get('passwordcustomer')\n accountcustomer = authenticate(request, userID=username, password=password)\n # if there exists an account with the authentication\n if accountcustomer is not None:\n login(request, accountcustomer)\n return redirect('customer_redirect')\n\n else:\n # CONTEXT[\"staffExist\"] = request.user\n # CONTEXT[\"customerExist\"] = request.user\n CONTEXT[\"staffLogin\"] = OperatorLogin()\n CONTEXT[\"customerLogin\"] = CustomerLogin()\n return render(request, \"frontpage.html\", CONTEXT)\n\n\n# def staffLoginView(request):\n# # IF this is POST request:\n# if request.method == 
\"POST\":\n# account = None\n# formOperatorLogin = OperatorLogin(request.POST)\n#\n# if formOperatorLogin.is_valid():\n# username = formOperatorLogin.cleaned_data.get('userIDstaff')\n# password = formOperatorLogin.cleaned_data.get('passwordstaff')\n# account = authenticate(request, userID=username, password=password)\n# # ELSE when this is GET request:\n# else:\n# account = request.user\n# formOperatorLogin = OperatorLogin()\n#\n# result = {\n# \"staffLogin\": formOperatorLogin,\n# \"staffExist\": account,\n# }\n# return result\n#\n#\n# def customerLoginView(request):\n# # IF this is POST request:\n# if request.method == \"POST\":\n# accountcustomer = None\n# formCustomerLogin = CustomerLogin(request.POST)\n#\n# if formCustomerLogin.is_valid():\n# username = formCustomerLogin.cleaned_data.get('userIDcustomer')\n# password = formCustomerLogin.cleaned_data.get('passwordcustomer')\n# accountcustomer = authenticate(request, userID=username, password=password)\n# # ELSE when this is GET request:\n# else:\n# accountcustomer = request.user\n# formCustomerLogin = CustomerLogin()\n#\n# result = {\n# \"customerLogin\": formCustomerLogin,\n# \"customerExist\": accountcustomer,\n# }\n# return result\n\n\ndef shipment_print(request):\n context = {}\n id_tobeprinted = []\n vessel_tobeprinted = \"\"\n shipment_tobeprinted = []\n total_quanty = 0\n total_weight = 0\n\n if request.method == \"POST\" and \"htmlShipment\" in request.POST:\n shipmentselectedlist = request.POST.get('checkedList')\n shipmentprintedlist = shipmentselectedlist.split(',')\n for sh in shipmentprintedlist:\n if sh is not \"\":\n sh = int(sh)\n shipment = Shipment.objects.get(number__exact=sh)\n id_tobeprinted.append(sh)\n shipment_tobeprinted.append(shipment)\n if shipment.vessel not in vessel_tobeprinted:\n vessel_tobeprinted += \"/ \" + shipment.vessel\n if shipment.quanty is not \"\":\n total_quanty += int(shipment.quanty)\n if shipment.weight is not \"\":\n total_weight += int(shipment.weight)\n\n vessel_tobeprinted = vessel_tobeprinted[1:]\n context['totalQuanty'] = total_quanty\n context['totalWeight'] = total_weight\n context['vesselPrinted'] = vessel_tobeprinted\n context['shipmentPrinted'] = shipment_tobeprinted\n return context\n\n\ndef mainView1(request):\n useragent = get_user_agent(request)\n\n shipmentmodifyview_result = shipmentModifyView(request)\n CONTEXT.update(shipmentmodifyview_result)\n shipmentregisterview_result = shipmentRegisterView(request)\n CONTEXT.update(shipmentregisterview_result)\n\n shipmentfilterview_result = shipmentFilterView(request)\n warehousefilterview_result = warehouseFilterView(request)\n CONTEXT.update(shipmentfilterview_result)\n CONTEXT.update(warehousefilterview_result)\n # shipmentresults = CONTEXT[\"shipmentDisplay\"]\n shipmentresults = CONTEXT[\"shipmentResults\"]\n CONTEXT[\"totalResults\"] = len(shipmentresults)\n\n warehousefiltered = request.GET.get('wh')\n\n if request.method == \"POST\":\n shipmentselectedlist = request.POST.get('checkedList')\n shipmentadjustedlist = request.POST.get('changedList')\n\n colorpicked = request.POST.get('pickedColor')\n\n # check the button on mainPage1.html\n if \"addShipment\" in request.POST:\n # load the form from the CONTEXT\n shipmentregisterform = CONTEXT[\"shipmentRegister\"]\n if shipmentregisterform.is_valid():\n shipment = shipmentregisterform.save()\n if shipment.warehouse is not \"\":\n shipment.wh_timestamp = datetime.now()\n if shipment.warehouse2 is not \"\":\n shipment.wh_timestamp2 = datetime.now()\n shipment.insert_org = 
request.user.userID\n shipment.save()\n messages.success(request, 'New shipment with id ' + str(shipment.number) + ' has been added successfully')\n CONTEXT[\"shipmentRegister\"] = ShipmentRegistration()\n return redirect('mainPage1')\n\n if \"changeShipment\" in request.POST:\n # load the form from the CONTEXT\n shipmentadjustedlist = int(shipmentadjustedlist)\n shipmentregisterform = CONTEXT[\"shipmentRegister\"]\n if shipmentregisterform.is_valid():\n modified_company = shipmentregisterform.cleaned_data.get('company')\n modified_vessel = shipmentregisterform.cleaned_data.get('vessel')\n modified_docs = shipmentregisterform.cleaned_data.get('docs')\n modified_odr = shipmentregisterform.cleaned_data.get('odr')\n modified_supplier = shipmentregisterform.cleaned_data.get('supplier')\n modified_quanty = shipmentregisterform.cleaned_data.get('quanty')\n modified_unit = shipmentregisterform.cleaned_data.get('unit')\n modified_size = shipmentregisterform.cleaned_data.get('size')\n modified_weight = shipmentregisterform.cleaned_data.get('weight')\n modified_in_date = shipmentregisterform.cleaned_data.get('in_date')\n modified_warehouse = shipmentregisterform.cleaned_data.get('warehouse')\n modified_warehouse2 = shipmentregisterform.cleaned_data.get('warehouse2')\n modified_by = shipmentregisterform.cleaned_data.get('by')\n modified_BLno = shipmentregisterform.cleaned_data.get('BLno')\n modified_port = shipmentregisterform.cleaned_data.get('port')\n modified_out_date = shipmentregisterform.cleaned_data.get('out_date')\n modified_remark = shipmentregisterform.cleaned_data.get('remark')\n modified_division = shipmentregisterform.cleaned_data.get('division')\n modified_job_number = shipmentregisterform.cleaned_data.get('job_number')\n\n # check the shipment to update\n shipment = Shipment.objects.get(number__exact=shipmentadjustedlist)\n shipment.company = modified_company\n shipment.vessel = modified_vessel\n shipment.docs = modified_docs\n shipment.odr = modified_odr\n shipment.supplier = modified_supplier\n shipment.quanty = modified_quanty\n shipment.unit = modified_unit\n shipment.size = modified_size\n shipment.weight = modified_weight\n shipment.in_date = modified_in_date\n shipment.warehouse = modified_warehouse\n shipment.warehouse2 = modified_warehouse2\n shipment.by = modified_by\n shipment.BLno = modified_BLno\n shipment.port = modified_port\n shipment.out_date = modified_out_date\n shipment.remark = modified_remark\n shipment.division = modified_division\n shipment.job_number = modified_job_number\n\n shipment.correct_org = request.user.userID\n shipment.save()\n\n if \"addShipment_m\" in request.POST:\n # load the form from the CONTEXT\n shipmentregisterform = CONTEXT[\"shipmentRegister\"]\n if shipmentregisterform.is_valid():\n shipment_m = shipmentregisterform.save()\n shipment_m.insert_org = request.user.userID\n shipment_m.save()\n messages.success(request, 'New shipment with id ' + str(shipment_m.number) + ' has been added successfully')\n\n CONTEXT[\"shipmentRegister\"] = ShipmentRegistration()\n return redirect('mainPage1')\n\n if \"modifyShipment\" in request.POST:\n shipmentchangedlist = shipmentselectedlist.split(',')\n shipmentmodifyform = CONTEXT[\"shipmentModify\"]\n if shipmentmodifyform.is_valid():\n changed_company = shipmentmodifyform.cleaned_data.get('companyM')\n changed_vessel = shipmentmodifyform.cleaned_data.get('vesselM')\n changed_supplier = shipmentmodifyform.cleaned_data.get('supplierM')\n changed_warehouse = shipmentmodifyform.cleaned_data.get('warehouseM')\n 
changed_warehouse2 = shipmentmodifyform.cleaned_data.get('warehouse2M')\n changed_division = shipmentmodifyform.cleaned_data.get('divisionM')\n changed_flag_status = shipmentmodifyform.cleaned_data.get('flag_statusM')\n changed_in_date = shipmentmodifyform.cleaned_data.get('in_dateM')\n changed_out_date = shipmentmodifyform.cleaned_data.get('out_dateM')\n changed_job_number = shipmentmodifyform.cleaned_data.get('job_numberM')\n changed_port = shipmentmodifyform.cleaned_data.get('portM')\n changed_remark = shipmentmodifyform.cleaned_data.get('remarkM')\n changed_memo = shipmentmodifyform.cleaned_data.get('memoM')\n changed_docs = shipmentmodifyform.cleaned_data.get('docsM')\n changed_odr = shipmentmodifyform.cleaned_data.get('odrM')\n changed_quanty = shipmentmodifyform.cleaned_data.get('quantyM')\n changed_unit = shipmentmodifyform.cleaned_data.get('unitM')\n changed_size = shipmentmodifyform.cleaned_data.get('sizeM')\n changed_weight = shipmentmodifyform.cleaned_data.get('weightM')\n changed_BLno = shipmentmodifyform.cleaned_data.get('BLnoM')\n changed_colorpick = colorpicked\n\n for id_shipmentchanged in shipmentchangedlist:\n if id_shipmentchanged == \"\":\n pass\n else:\n id_shipmentchanged = int(id_shipmentchanged)\n shipmentchanged = Shipment.objects.get(number=id_shipmentchanged)\n if changed_company is not \"\":\n shipmentchanged.company = changed_company\n if changed_vessel is not \"\":\n shipmentchanged.vessel = changed_vessel\n if changed_supplier is not \"\":\n shipmentchanged.supplier = changed_supplier\n if changed_warehouse is not \"\":\n shipmentchanged.warehouse = changed_warehouse\n if changed_warehouse2 is not \"\":\n shipmentchanged.warehouse2 = changed_warehouse2\n if changed_division is not \"\":\n shipmentchanged.division = changed_division\n if changed_flag_status is not \"\":\n shipmentchanged.flag_status = changed_flag_status\n if changed_in_date is not \"\":\n shipmentchanged.in_date = changed_in_date\n # change_in_date = changed_in_date[:4] + \"-\" + changed_in_date[4:6] + \"-\" + changed_in_date[6:8]\n # shipmentchanged.in_date = change_in_date\n if changed_out_date is not \"\":\n shipmentchanged.out_date = changed_out_date\n # change_out_date = changed_out_date[:4] + \"-\" + changed_out_date[4:6] + \"-\" + changed_out_date[6:8]\n # shipmentchanged.out_date = change_out_date\n if changed_job_number is not \"\":\n shipmentchanged.job_number = changed_job_number\n if changed_port is not \"\":\n shipmentchanged.port = changed_port\n if changed_remark is not \"\":\n shipmentchanged.remark = changed_remark\n if changed_memo is not \"\":\n shipmentchanged.remark = changed_memo\n if changed_docs is not \"\":\n shipmentchanged.docs = changed_docs\n if changed_odr is not \"\":\n shipmentchanged.odr = changed_odr\n if changed_quanty is not \"\":\n shipmentchanged.quanty = changed_quanty\n if changed_unit is not \"\":\n shipmentchanged.unit = changed_unit\n if changed_size is not \"\":\n shipmentchanged.size = changed_size\n if changed_weight is not \"\":\n shipmentchanged.weight = changed_weight\n if changed_BLno is not \"\":\n shipmentchanged.BLno = changed_BLno\n shipmentchanged.colorpick = changed_colorpick\n\n shipmentchanged.correct_org = request.user.userID\n shipmentchanged.save()\n CONTEXT[\"shipmentModify\"] = ShipmentModification()\n\n if \"deleteShipment\" in request.POST:\n shipmentchangedlist = shipmentselectedlist.split(',')\n\n for id_shipmentdeleted in shipmentchangedlist:\n if id_shipmentdeleted == \"\":\n pass\n else:\n id_shipmentdeleted = 
int(id_shipmentdeleted)\n Shipment.objects.filter(number__exact=id_shipmentdeleted).delete()\n\n if \"printShipment\" in request.POST:\n response = HttpResponse(content_type='application/ms-excel')\n response['Content-Disposition'] = 'attachment; filename=\"shipment_list.xls\"'\n wb = xlwt.Workbook(encoding='utf-8')\n ws = wb.add_sheet('Shipments')\n # Sheet header, first row\n row_num = 0\n title_style = xlwt.easyxf('font: bold on; align: wrap on, vert centre, horiz center')\n row_style = xlwt.easyxf('align: wrap on, vert centre, horiz center')\n\n columns = ['Company', 'Vessel', 'Doc', 'Odr', 'Supplier', 'Qty', 'Unit', 'Size', 'Weight', 'In-date', 'Out-date', 'Warehouse',\n 'By', 'BLno', 'Port', 'Remark', 'Job.No', 'Division', 'Status', 'Image', 'User-created', 'User-modified']\n for col_num in range(len(columns)):\n ws.write(row_num, col_num, columns[col_num], title_style)\n # Sheet body, remaining rows\n ws.col(0).width = 256 * 12\n ws.col(1).width = 256 * 20\n ws.col(2).width = 256 * 20\n ws.col(3).width = 256 * 8\n ws.col(4).width = 256 * 8\n ws.col(5).width = 256 * 5\n ws.col(6).width = 256 * 5\n ws.col(7).width = 256 * 8\n ws.col(8).width = 256 * 8\n ws.col(9).width = 256 * 10\n ws.col(10).width = 256 * 10\n ws.col(11).width = 256 * 12\n ws.col(12).width = 256 * 5\n ws.col(13).width = 256 * 8\n ws.col(14).width = 256 * 8\n ws.col(15).width = 256 * 10\n ws.col(16).width = 256 * 10\n ws.col(17).width = 256 * 8\n ws.col(18).width = 256 * 8\n ws.col(19).width = 256 * 20\n ws.col(20).width = 256 * 10\n ws.col(21).width = 256 * 10\n\n shipmentprintedlist = shipmentselectedlist.split(',')\n for id_shipmentprinted in shipmentprintedlist:\n if id_shipmentprinted == \"\":\n pass\n else:\n id_shipmentprinted = int(id_shipmentprinted)\n shipmentprinted = Shipment.objects.get(number=id_shipmentprinted)\n\n row_num += 1\n # for col_num in range(22):\n ws.write(row_num, 0, shipmentprinted.company, row_style)\n ws.write(row_num, 1, shipmentprinted.vessel, row_style)\n ws.write(row_num, 2, shipmentprinted.docs, row_style)\n ws.write(row_num, 3, shipmentprinted.odr, row_style)\n ws.write(row_num, 4, shipmentprinted.supplier, row_style)\n ws.write(row_num, 5, shipmentprinted.quanty, row_style)\n ws.write(row_num, 6, shipmentprinted.unit, row_style)\n ws.write(row_num, 7, shipmentprinted.size, row_style)\n ws.write(row_num, 8, shipmentprinted.weight, row_style)\n ws.write(row_num, 9, shipmentprinted.in_date, row_style)\n ws.write(row_num, 10, shipmentprinted.out_date, row_style)\n ws.write(row_num, 11, shipmentprinted.warehouse, row_style)\n ws.write(row_num, 12, shipmentprinted.by, row_style)\n ws.write(row_num, 13, shipmentprinted.BLno, row_style)\n ws.write(row_num, 14, shipmentprinted.port, row_style)\n ws.write(row_num, 15, shipmentprinted.remark, row_style)\n ws.write(row_num, 16, shipmentprinted.job_number, row_style)\n ws.write(row_num, 17, shipmentprinted.division, row_style)\n ws.write(row_num, 18, shipmentprinted.flag_status, row_style)\n if (shipmentprinted.image):\n tall_style = xlwt.easyxf('font:height 1000;')\n ws.row(row_num).set_style(tall_style)\n\n img = Image.open(str(settings.BASE_DIR) + shipmentprinted.image.url)\n scale = (140, 90)\n img.thumbnail(scale)\n r, g, b, a = img.split()\n img = Image.merge(\"RGB\", (r, g, b))\n img.save('sm.bmp')\n ws.insert_bitmap('sm.bmp', row_num, 19)\n ws.write(row_num, 20, shipmentprinted.insert_org, row_style)\n ws.write(row_num, 21, shipmentprinted.correct_org, row_style)\n\n wb.save(response)\n return response\n\n if \"resetFilter\" in 
request.POST:\n CONTEXT[\"shipmentFilter\"] = ShipmentFilter()\n return redirect(\"mainPage1\")\n\n if \"htmlShipment\" in request.POST:\n shipmentHTML = shipment_print(request)\n CONTEXT.update(shipmentHTML)\n return render(request, \"shipmentPrint.html\", CONTEXT)\n\n else:\n # CONTEXT[\"shipmentRegister\"] = ShipmentRegistration()\n # CONTEXT[\"shipmentModify\"] = ShipmentModification()\n if not request.user.is_authenticated:\n logout(request)\n return redirect(\"frontpage\")\n\n if useragent.is_mobile:\n return render(request, \"mainPage1-mobile.html\", CONTEXT)\n else:\n return render(request, \"mainPage1.html\", CONTEXT)\n\n\ndef mainView2(request):\n useragent = get_user_agent(request)\n context = {}\n\n if not request.user.is_authenticated:\n logout(request)\n return render(request, \"frontpage.html\", CONTEXT)\n # elif CONTEXT.get('customerExist') == None:\n # return render(request, \"frontpage.html\", CONTEXT)\n else:\n shipmentfilterview_result = shipmentFilterView(request)\n context[\"allVessels\"] = VESS_LIST\n context[\"vessel_key\"] = VESSEL_KEY_JS\n context.update(shipmentfilterview_result)\n\n if useragent.is_mobile:\n return render(request, \"mainPage2-mobile.html\", context)\n else:\n return render(request, \"mainPage2.html\", context)\n\n" }, { "alpha_fraction": 0.6429308652877808, "alphanum_fraction": 0.644994854927063, "avg_line_length": 36.269229888916016, "blob_id": "23be986ccbe57138efb95b310dc1b056251e39c7", "content_id": "9964eb5bbf866c16898845eec2956fcfa1a8bbd5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 969, "license_type": "no_license", "max_line_length": 100, "num_lines": 26, "path": "/Operator/models.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "# from django.conf import settings\n# from django.db import models\n# from django.contrib.auth.models import AbstractBaseUser\n#\n#\n# PERMISSION = {\n# (\"Read Only\", \"Read Only\"),\n# (\"Read + Modify\", \"Read + Modify\"),\n# }\n# MAX_LENGTH = 30\n#\n#\n# class Operator(AbstractBaseUser):\n# user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, primary_key=True)\n# operatorID = models.CharField(verbose_name=\"Operator ID\", max_length=MAX_LENGTH, default=None,\n# unique=True)\n#\n# OPRpassword = models.CharField(verbose_name=\"Password\", max_length=MAX_LENGTH, default=None)\n# permission = models.CharField(verbose_name=\"Permission\", max_length=MAX_LENGTH, default=None,\n# choices=PERMISSION)\n# dateSignUp = models.DateField(verbose_name=\"Date Signed Up\", auto_now=True)\n#\n# USERNAME_FIELD = 'operatorID'\n#\n# class Meta:\n# db_table = \"operator_table\"\n" }, { "alpha_fraction": 0.584180474281311, "alphanum_fraction": 0.5894084572792053, "avg_line_length": 41.51028823852539, "blob_id": "f0a266753598094a273527451632eb2ef90f6c2f", "content_id": "1592aeff2506b220b828132db6c01f704c2248d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10329, "license_type": "no_license", "max_line_length": 127, "num_lines": 243, "path": "/account_forms.py", "repo_name": "LongCohol/HerokuKMOUWebsite", "src_encoding": "UTF-8", "text": "from django import forms\nfrom django.contrib.auth import authenticate\nfrom django.contrib.auth.hashers import make_password\n\nfrom User.models import Account\n# from Operator.models import Operator\n# from Company.models import Company\n# from Supplier.models import Supplier\n\n\nPERMISSION = {\n (\"Read Only\", 
\"Read Only\"),\n (\"Read + Modify\", \"Read + Modify\"),\n}\n\n\nclass OperatorLogin(forms.ModelForm):\n userIDstaff = forms.CharField(label=\"Staff ID\", widget=forms.TextInput)\n passwordstaff = forms.CharField(label=\"Password\", widget=forms.PasswordInput)\n\n class Meta:\n model = Account\n fields = ('userIDstaff', 'passwordstaff')\n\n def __init__(self, *args, **kwargs):\n # Set up dimension for fields with CSS style\n super(OperatorLogin, self).__init__(*args, **kwargs)\n self.fields['userIDstaff'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n self.fields['passwordstaff'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n\n def clean(self):\n if self.is_valid():\n username = self.cleaned_data.get('userIDstaff')\n password = self.cleaned_data.get('passwordstaff')\n\n if not authenticate(userID=username, password=password):\n raise forms.ValidationError(\"Invalid staff account. Please try again\")\n ###########################################################\n\n\nclass CustomerLogin(forms.ModelForm):\n userIDcustomer = forms.CharField(label=\"Customer ID\", widget=forms.TextInput)\n passwordcustomer = forms.CharField(label=\"Password\", widget=forms.PasswordInput)\n\n class Meta:\n model = Account\n fields = ('userIDcustomer', 'passwordcustomer')\n\n def __init__(self, *args, **kwargs):\n # Set up dimension for fields with CSS style\n super(CustomerLogin, self).__init__(*args, **kwargs)\n self.fields['userIDcustomer'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n self.fields['passwordcustomer'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n\n def clean(self):\n if self.is_valid():\n username = self.cleaned_data.get('userIDcustomer')\n password = self.cleaned_data.get('passwordcustomer')\n\n if not authenticate(userID=username, password=password):\n raise forms.ValidationError(\"Invalid customer account. 
Please try again\")\n ###########################################################\n\n\nclass OperatorForm(forms.ModelForm):\n oprID = forms.CharField()\n oprPassword = forms.CharField()\n oprPermission = forms.ChoiceField(choices=PERMISSION)\n\n class Meta:\n model = Account\n fields = ('oprID', 'oprPassword', 'oprPermission')\n\n def __init__(self, *args, **kwargs):\n # Set up dimension for fields with CSS style\n super(OperatorForm, self).__init__(*args, **kwargs)\n self.fields['oprID'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n self.fields['oprPassword'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n self.fields['oprPermission'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n\n def save(self, commit=True):\n super().save(commit=False)\n userID = self.cleaned_data.get('oprID')\n password = self.cleaned_data.get('oprPassword')\n permission = self.cleaned_data.get('oprPermission')\n isOpr = True\n account = Account.objects.create(userID=userID, password=password, rawPassword=password, permission=permission,\n companyName=\"\", email=\"\", isOpr=isOpr, vesselList=\"\")\n\n if commit:\n account.password = make_password(password)\n account.save()\n return account\n ###########################################################\n\n\nclass SupplierForm(forms.ModelForm):\n splID = forms.CharField()\n splName = forms.CharField()\n splPassword = forms.CharField()\n\n class Meta:\n model = Account\n fields = ('splID', 'splName', 'splPassword')\n\n def __init__(self, *args, **kwargs):\n # Set up dimension for fields with CSS style\n super(SupplierForm, self).__init__(*args, **kwargs)\n self.fields['splID'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n self.fields['splName'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n self.fields['splPassword'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n\n def save(self, commit=True):\n super().save(commit=False)\n userID = self.cleaned_data.get('splID')\n password = self.cleaned_data.get('splPassword')\n companyName = self.cleaned_data.get('splName')\n isSpl = True\n account = Account.objects.create(userID=userID, password=password, rawPassword=password, permission=\"Read Only\",\n companyName=companyName, email=\"\", isSpl=isSpl, vesselList=\"\")\n\n if commit:\n account.password = make_password(password)\n account.save()\n return account\n ###########################################################\n\n\nclass CompanyForm(forms.ModelForm):\n cpnID = forms.CharField()\n cpnName = forms.CharField()\n cpnEmail = forms.EmailField()\n cpnPassword = forms.CharField()\n cpnVesselList = forms.CharField(widget=forms.Textarea())\n\n class Meta:\n model = Account\n fields = ('cpnID', 'cpnName', 'cpnEmail', 'cpnPassword', 'cpnVesselList')\n\n def __init__(self, *args, **kwargs):\n # Set up dimension for fields with CSS style\n super(CompanyForm, self).__init__(*args, **kwargs)\n self.fields['cpnID'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n self.fields['cpnName'].widget.attrs['style'] = 'width: 4cm; height: 1cm'\n self.fields['cpnEmail'].widget.attrs['style'] = 'width: 7cm; height: 1cm'\n self.fields['cpnPassword'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n self.fields['cpnVesselList'].widget.attrs['style'] = 'width: 21cm; height: 2cm'\n\n def save(self, commit=True):\n super().save(commit=False)\n userID = self.cleaned_data.get('cpnID')\n password = self.cleaned_data.get('cpnPassword')\n email = self.cleaned_data.get('cpnEmail')\n companyName = self.cleaned_data.get('cpnName')\n vesselList = 
self.cleaned_data.get('cpnVesselList')\n isCpn = True\n account = Account.objects.create(userID=userID, password=password, rawPassword=password, permission=\"Read Only\",\n companyName=companyName, email=email, isCpn=isCpn, vesselList=vesselList)\n\n if commit:\n account.password = make_password(password)\n account.save()\n return account\n ###########################################################\n\n# class OperatorForm(forms.ModelForm):\n# class Meta:\n# model = Operator\n# fields = ('operatorID', 'OPRpassword', 'permission')\n#\n# def __init__(self, *args, **kwargs):\n# # Set up dimension for fields with CSS style\n# super(OperatorForm, self).__init__(*args, **kwargs)\n# self.fields['operatorID'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n# self.fields['OPRpassword'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n# self.fields['permission'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n#\n# def save(self, commit=True):\n# super().save(commit=False)\n# oprID = self.cleaned_data.get('operatorID')\n# password = self.cleaned_data.get('OPRpassword')\n# permission = self.cleaned_data.get('permission')\n# operator = Operator.objects.create(operatorID=oprID, OPRpassword=password, permission=permission)\n#\n# if commit:\n# operator.save()\n# return operator\n# ###########################################################\n#\n#\n# class SupplierForm(forms.ModelForm):\n# class Meta:\n# model = Supplier\n# fields = ('supplierID', 'supplierName', 'SPLpassword')\n#\n# def __init__(self, *args, **kwargs):\n# # Set up dimension for fields with CSS style\n# super(SupplierForm, self).__init__(*args, **kwargs)\n# self.fields['supplierID'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n# self.fields['supplierName'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n# self.fields['SPLpassword'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n#\n# def save(self, commit=True):\n# super().save(commit=False)\n# splID = self.cleaned_data.get('supplierID')\n# splName = self.cleaned_data.get('supplierName')\n# password = self.cleaned_data.get('SPLpassword')\n# supplier = Supplier.objects.create(supplierID=splID, supplierName=splName, SPLpassword=password)\n#\n# if commit:\n# supplier.save()\n# return supplier\n# ###########################################################\n#\n#\n# class CompanyForm(forms.ModelForm):\n# class Meta:\n# model = Company\n# fields = ('companyID', 'companyName', 'companyEmail', 'CPNpassword', 'vesselList')\n#\n# def __init__(self, *args, **kwargs):\n# # Set up dimension for fields with CSS style\n# super(CompanyForm, self).__init__(*args, **kwargs)\n# self.fields['companyID'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n# self.fields['companyName'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n# self.fields['companyEmail'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n# self.fields['CPNpassword'].widget.attrs['style'] = 'width: 5cm; height: 1cm'\n# self.fields['vesselList'].widget.attrs['style'] = 'width: 20cm; height: 3cm'\n#\n# def save(self, commit=True):\n# super().save(commit=False)\n# cpnID = self.cleaned_data.get('companyID')\n# cpnName = self.cleaned_data.get('companyName')\n# cpnEmail = self.cleaned_data.get('companyEmail')\n# password = self.cleaned_data.get('CPNpassword')\n# vesselList = self.cleaned_data.get('vesselList')\n#\n# company = Company.objects.create(companyID=cpnID, companyName=cpnName, companyEmail=cpnEmail, CPNpassword=password,\n# vesselList=vesselList)\n# if commit:\n# company.save()\n# return company\n# 
###########################################################" } ]
15
Aersum/py-learning
https://github.com/Aersum/py-learning
ce34e20fafcf0318aab0983e878010490338608b
4cadb539933e8e93af700d98d2c80104f2bad372
557b5d3abf7c4342719b7867ad5ec48920b0835c
refs/heads/master
2020-03-27T10:17:50.763243
2019-02-11T13:58:28
2019-02-11T13:58:28
146,409,455
0
0
null
2018-08-28T07:32:13
2018-09-04T18:29:26
2018-09-04T18:33:39
Python
[ { "alpha_fraction": 0.7084745764732361, "alphanum_fraction": 0.7084745764732361, "avg_line_length": 35.875, "blob_id": "6e3ddaf14e6698931dacaa20eb655e167c87a1dc", "content_id": "d12e5b28d1fd919877bbf6f5537fb122821e7b69", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 295, "license_type": "no_license", "max_line_length": 58, "num_lines": 8, "path": "/ex13.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from sys import argv\n#read thr WYSS section for how to run this\nscript, name, surname, age = argv\n\nprint(\"The script is called:\", script)\nprint(f\"So, {name} {surname}. You're {age} years old.\")\nvehicle = input(\"Tell me what is your favorite vehicle? \")\nprint(\"{} is good thing\".format(vehicle))\n" }, { "alpha_fraction": 0.5491803288459778, "alphanum_fraction": 0.5491803288459778, "avg_line_length": 18.0625, "blob_id": "abcee828ff275bd5676e25d3df987527641af950", "content_id": "6be7ae01f5f519abbf9e381355e05ea189d46aa3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1220, "license_type": "no_license", "max_line_length": 54, "num_lines": 64, "path": "/bonus/w3l2/py3classes.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "class Pet():\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n\n def get_name(self):\n return self.name\n\n def get_species(self):\n return self.species\n\n def __str__(self):\n return f'{self.name} is {self.species}'\n\n\nclass Cat(Pet):\n\n def __init__(self, name, species, weight):\n Pet.__init__(self, name, species)\n self.weight = weight\n\n def voice(self):\n print(\"Miauuu\")\n\n\nclass Dog(Pet):\n\n def __init__(self, name, species, weight):\n Pet.__init__(self, name, species)\n self.weight = weight\n\n def voice(self):\n print(\"woof\")\n\n\nclass CatDogMonster(Cat, Dog):\n pass\n\n\nclass ClassName(object):\n \"\"\"docstring for ClassName\"\"\"\n\n def __init__(self, arg):\n super(ClassName, self).__init__()\n self.arg = arg\n\n\nclass CatPack():\n cat_arr = []\n\n def __add__(self, cat):\n self.cat_arr.append(cat)\n return self\n\n def __len__(self):\n return len(self.cat_arr)\n\n def __str__(self):\n cat_names = [cat.name for cat in self.cat_arr]\n return 'cats in pack {}'.format(cat_names)\n\n def __call__(self):\n return 'Hello'.format(len(self))\n" }, { "alpha_fraction": 0.6906474828720093, "alphanum_fraction": 0.7625899314880371, "avg_line_length": 45.33333206176758, "blob_id": "26d9fd5984a3bba0aa48be75ed9b001b3b95b450", "content_id": "8096d94e281afad110761ae69dbff09b1aecbcdc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 278, "license_type": "no_license", "max_line_length": 73, "num_lines": 6, "path": "/README.md", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "# py-learning\nRepo for learning python with book\nAnd storing small py scripts\n* [Checklist for function creating in Python In ex 20](ex20/checklist.md)\n* [Key words list from ex1-21 for ex 22](ex22/conclusions_ex1-21.md)\n* [List of new things from Ex23](ex23/ex23_newthings.md)\n" }, { "alpha_fraction": 0.6578947305679321, "alphanum_fraction": 0.6691729426383972, "avg_line_length": 28.55555534362793, "blob_id": "08a5832a029ac689487f9e3d1240b0c3512a5c35", "content_id": "e6f3529803d8b4f6ce2965f347d381419503c4ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 266, "license_type": "no_license", "max_line_length": 44, "num_lines": 9, "path": "/ex18/checklist.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "def add_checklist(exersise,complete_status):\n\twith open(\"checklist.md\",'a') as f:\n\t\tif complete_status:\n\t\t\tf.write(f\"[x] {exersise}\\n\")\n\t\telse:\n\t\t\tf.write(f\"[ ] {exersise}\\n\")\n\tprint(\"Complete!\")\nadd_checklist(\"Exersise 1\", True)\nadd_checklist(\"Exersise 20\", False)\n" }, { "alpha_fraction": 0.7252747416496277, "alphanum_fraction": 0.7462537288665771, "avg_line_length": 36.03703689575195, "blob_id": "76f11b19c95a2dbd621493de3a60e7a731029360", "content_id": "4fda4fc98f6fb5c8579cace3f54795f480f90348", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1001, "license_type": "no_license", "max_line_length": 70, "num_lines": 27, "path": "/ex19.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "def cheese_and_crackers(cheese_count, boxes_of_crackers):\n\tprint(f\"You have {cheese_count} cheeses!\")\n\tprint(f\"You have {boxes_of_crackers} boxes of crackers!\")\n\tprint(\"Man that's enough for a party!\")\n\tprint(\"Get a bancket.\\n\")\ndef milk_and_coffee(milk, coffee):\n\tprint(f\"Mixing {milk} milk and {coffee} coffee\")\t\n#passing to the function numbers directly\nprint(\"We can just give the function numbers directly:\")\ncheese_and_crackers(20, 30)\n\n#definition of variables\nprint(\"Or, we can use variables from our script:\")\namount_of_cheese = 10\namount_of_crackers = 50\n#passing to the function variables\ncheese_and_crackers(amount_of_cheese, amount_of_crackers)\n\n#passing to the function math expressions\nprint(\"We acn even do math inside too:\")\ncheese_and_crackers(10+20, 5+6)\n\n#combining variants of passing\nprint(\"And we can combine the two, vriables and math:\")\ncheese_and_crackers(amount_of_cheese + 100, amount_of_crackers + 1000)\n\nmilk_and_coffee(input(\"Milk amount? \"), input(\"Coffee amount? 
\"))\n\n" }, { "alpha_fraction": 0.7225351929664612, "alphanum_fraction": 0.7338027954101562, "avg_line_length": 53.61538314819336, "blob_id": "b932e25943a48cdb39e3acd26bc3cb768e52d9fe", "content_id": "57dae90b10292e06e36cdb4826fd20ef59511b33", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 714, "license_type": "no_license", "max_line_length": 78, "num_lines": 13, "path": "/ex20/checklist.md", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "# Checklist for function creating\n- [ ] 1.Did you start your function definition with def?\n- [ ] 2.Does your function name have only characters and_(underscore)\ncharacters?\n- [ ] 3.Did you put an open parenthesis(right after the function name?\n- [ ] 4.Did you put your arguments after the parenthesis(separated by commas?\n- [ ] 5.Did you make each argument unique (meaning no duplicated names)?\n- [ ] 6.Did you put a close parenthesis and a colon):after the arguments?\n- [ ] 7.Did you indent all lines of code you want in the function four spaces?\nNo more, no less.\n- [ ] 8.Did you ”end” your function by going back to writing with no indent?\n\n[Return to main repo page](https://github.com/Aersum/py-learning)\n" }, { "alpha_fraction": 0.6330645084381104, "alphanum_fraction": 0.6330645084381104, "avg_line_length": 21.545454025268555, "blob_id": "db28eaed02cca7a1d2dd42f1454edcbc81b094a0", "content_id": "a410c0de1f9face243320d69226b68b3fc837d08", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 248, "license_type": "no_license", "max_line_length": 37, "num_lines": 11, "path": "/ex16/ex16r.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from sys import argv\n#unpucking\nscript, filename = argv\nprint(f\"Openning file {filename}...\")\ntxt = open(filename)\nprint(\"Here's content of file:\")\nprint(\"------------\")\nprint(txt.read())\nprint(\"------------\")\nprint(\"Closing the file\")\ntxt.close()\n" }, { "alpha_fraction": 0.7099811434745789, "alphanum_fraction": 0.7156308889389038, "avg_line_length": 34.33333206176758, "blob_id": "3ba2af168e7f0c694cf005930e6d401422556783", "content_id": "777d0cb77087dc53ffc40faa229be03e0986dbce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 531, "license_type": "no_license", "max_line_length": 50, "num_lines": 15, "path": "/ex9.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "\n#creating vars days (one line) and\n#months (many lines because of \"\\n symbol\") \ndays = \"Mon Tue Wed Thu Fri Sat Sun\"\nmonths = \"Jan\\n Feb\\nMar\\nApr\\nMay\\nJun\\nJul\\nAug\"\n# printing this vars with some text before\nprint(\"Here are the days: \", days)\nprint(\"Here are the months: \", months)\n#printing some text with saving format of writen\n#because it have written in \"\"\" quotes\nprint(\"\"\"\nThere's something going on here.\nWith the three double qoutes.\nWe'll be able to type as much as we like.\nEven 4 lines if we want, or 5, or 6.\n\"\"\")\n" }, { "alpha_fraction": 0.753600001335144, "alphanum_fraction": 0.7680000066757202, "avg_line_length": 31.894737243652344, "blob_id": "7bc54a6e0fe8532f0311f1991a72b83c0bc6c2d5", "content_id": "9ba040b5f4afaa3d0ae31f1b19b5063338b2d4af", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 625, "license_type": "no_license", "max_line_length": 70, "num_lines": 19, "path": "/ex4.py", 
"repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "#number of cars\ncars=100\n#number of people you can put in one car\nspace_in_a_car=4.0\n#number of drivers\ndrivers=30\n#number of passengers\npassengers=90\ncars_not_driven=cars-drivers\ncars_driven=drivers\ncarpool_capacity=cars_driven * space_in_a_car\naverage_passengers_per_car=passengers / cars_driven\n\nprint(\"There are\", cars,\"cars avaible\")\nprint(\"There are only\",drivers,\"drivers avaible.\")\nprint(\"There will be\",cars_not_driven,\"empty cars today\")\nprint(\"We caan transport\",carpool_capacity,\"people today\")\nprint(\"We have\",passengers,\"to carpool today\")\nprint(\"We need to put about\",average_passengers_per_car,\"in each car\")\n" }, { "alpha_fraction": 0.7119341492652893, "alphanum_fraction": 0.7242798209190369, "avg_line_length": 23.299999237060547, "blob_id": "871d4971371cd873f98c9c655c956d33edd1ea2d", "content_id": "b947a7adf5223e2b6abf2be3babd813ce93558f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 243, "license_type": "no_license", "max_line_length": 54, "num_lines": 10, "path": "/bonus/w3l2/test.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from py3classes import *\nprint(Pet)\np = Pet('dog', 'dolmatine')\nprint(p.get_name())\nprint(p.get_species())\nprint(p)\nmonster = CatDogMonster('Franksty', 'transilvpet', 90)\nprint(monster.weight)\nprint(monster.voice())\nprint(CatDogMonster.mro())\n" }, { "alpha_fraction": 0.6677796244621277, "alphanum_fraction": 0.6928213834762573, "avg_line_length": 23.95833396911621, "blob_id": "82ab5888db95b77a51906832ef8e6b31006c8cea", "content_id": "b3ab1579bd2a29abda4a9657d78591b14593d8d4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 603, "license_type": "no_license", "max_line_length": 80, "num_lines": 24, "path": "/ex23/ex23_newthings.md", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "### EX23\n### New things in exersise\n```python\nlanguages = open(\"languages.txt\",encoding=\"utf-8\")\n```\n```python\ndef print_line(line, encoding, errors):\n\tnext_lang = line.strip() #delete white spaces from begin and end of string\n\traw_bytes = next_lang.encode(encoding, errors = errors) #get byte string b'xxx'\n\tcooked_string = raw_bytes.decode(encoding, errors = errors)\n```\n```python\n>>> 0b1011010\n90\n```\n```python\nord ( ’Z ’ ) #turns character to number in ASCII table\n```\n```python\nchr(90) #turns Ascii number into character\n```\n\n\n[Return to main repo page](https://github.com/Aersum/py-learning)\n" }, { "alpha_fraction": 0.514970064163208, "alphanum_fraction": 0.5209580659866333, "avg_line_length": 21.85714340209961, "blob_id": "fd261da8e6c30e07ce9bbc97d22bf5781ba8d9d2", "content_id": "ad4955624d11b4023717a00436ff277b134bff39", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 167, "license_type": "no_license", "max_line_length": 38, "num_lines": 7, "path": "/tryit.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "def newfunc(t):\r\n try:\r\n t=int(t)\r\n print(\"Number! 
{0}\".format(t))\r\n except ValueError:\r\n print('Error')\r\nnewfunc(input('Enter number: '))\r\n" }, { "alpha_fraction": 0.7083839774131775, "alphanum_fraction": 0.7114216089248657, "avg_line_length": 25.516128540039062, "blob_id": "cb82d08cde31128e2155a0dfc849848da3c36316", "content_id": "d0ffd092213fa18b2552b7018c6f64efa62d74a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1646, "license_type": "no_license", "max_line_length": 87, "num_lines": 62, "path": "/bonus/lect5.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from datetime import datetime\nfrom datetime import timedelta\nimport io\nimport os\nimport pprint\nimport logging\nimport json\nprint(datetime.now())\nprint(datetime.now().isoformat())\nprint(datetime.now().strftime(\"%Y-%m-%d\"))\nprint(datetime.now().strftime(\"%Y-%m-%dT%H-%M\"))\nprint(datetime.now().strftime(\"%Y-%m-%dT%H:%M\"))\n\nprint('#timedelta')\nprint(datetime.now() + timedelta(hours=1))\nprint(\"#Grinwich time\")\nprint(datetime.utcnow())\n\n#files\nfile = open(\"file.txt\", 'w')\nfile.write(\"Hello world\")\nfile.close()\nwith open('file.txt', 'r') as file:\n\tprint(file.read())\n#io\n# creating files na letu\nprint('io')\nbytebuffer = io.BytesIO()\n# bytebuffer = io.StringIO()\nbytebuffer.write(b'Hello World')\nprint(bytebuffer.getvalue())\nbytebuffer.close()\n# making dir\n# os.mkdir('test1')\n#working in bash\n#'test' in os.listdir()\nprint(os.path.isfile('test'))\n# environmental vars\nprint(os.environ.get(\"HELLO\"))\npprint.pprint(dir(os.environ))\n# logging\n#creates file and stream logger\n# logger = logging.getLogger(\"my_logger\")\n# logger.setLevel(logging.INFO)\n# formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n# stream = logging.StreamHandler()\n# file = logging.FileHandler(\"logging.log\")\n# stream.setFormatter(formatter)\n# file.setFormatter(formatter)\n# logger.addHandler(stream)\n# logger.addHandler(file)\n# logger.info(\"Hello World\")\n# logger.warning(\"Warning! Achtung! You can break something\")\n\n# json\ndata = {\"one\": 1, \"two\": 2, \"three\": 3}\n#converts dict to string\nprint(json.dumps(data))\njson_data = '{\"hello\": \"world\"}'\n# from string to json data\nprint(json.loads(json_data))\nprint(json.loads(json_data)['hello'])\n\n\n" }, { "alpha_fraction": 0.6802120208740234, "alphanum_fraction": 0.6802120208740234, "avg_line_length": 22.58333396911621, "blob_id": "d42c013a55c4671e157a20b059d5b998b3bc0724", "content_id": "2fb40e86d68f45690240555633f2c984368288ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 566, "license_type": "no_license", "max_line_length": 87, "num_lines": 24, "path": "/ex17/ex17.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from sys import argv\nfrom os.path import exists\n#unpucking\nscript, from_file, to_file = argv\n\nprint(f\"Copying from {from_file} to {to_file} (Existing: {exists(to_file)})->\",end=' ')\n\n#we colud do these two on one line, how?\nin_file = open(from_file)\n#in_data = in_file.read()\n\n#print(f\"The input file is {len(in_data)} bytes long\")\n\n#print(f\"Does the outout file exist? {exists(to_file)}\")\n#print(\"Ready, hit Return to continue. 
CTRL-C to abort.\")\n#input()\n\nout_file = open(to_file, 'w')\nout_file.write(in_file.read())\n\nprint(\"OK\")\n\nout_file.close()\nin_file.close()\n" }, { "alpha_fraction": 0.5208333134651184, "alphanum_fraction": 0.5672348737716675, "avg_line_length": 21.446807861328125, "blob_id": "469e3ec7a165443f7584506205e64a4220720737", "content_id": "3bd3447367c66780fd167dae5bb5e670f7b11983", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1056, "license_type": "no_license", "max_line_length": 76, "num_lines": 47, "path": "/ex21.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "def add(a, b):\n\tprint(f\"ADDING {a} + {b}\")\n\treturn a+b\n\ndef subtract(a, b):\n\tprint(f\"SUBTRACTING {a} - {b}\")\n\treturn a -b\n\ndef multiply(a, b):\n\tprint(f\"MULTIPLYING {a} * {b}\")\n\treturn a * b\n\ndef divide (a, b):\n\tprint(f\"DIVIDING {a} / {b}\")\n\treturn a /b\n\ndef quad_equation(a, b, c):\n\tD= b**2 - 4 * a * c\n\tif D >= 0:\n\t\tx1 = (-b + D**0.5) / (2 * a)\n\t\tx2 = (-b - D**0.5) / (2 * a)\n\telse:\n\t\tx1 = False\n\t\tx2 = False\n\treturn x1, x2\n\t\t\nprint(\"Let's do some match with just functions!\")\n\nage = add(30, 5)\nheight = subtract(78, 4)\nweight = multiply(90, 2)\niq = divide(100, 2)\n\nprint(f\"Age: {age}, Height: {height}, Weight: {weight}, IQ: {iq}\")\n\n\n#A puzzle for extra credit. type it in anyway\nprint(\"Here is a puzzle.\")\n\nwhat = add(age, subtract(height, multiply(weight, divide(iq, 2))))\n\nprint(\"That becomes: \", what, \"Can you do it by hand?\")\n\nfa, sa = quad_equation(2, 5 , 3)\nprint(f\"Roots of equation 2*x**2 + 5 * x + 3 = 0 is: x1 = {fa}, x2 = {sa}\")\nfa, sa = quad_equation(5, 5 , 3)\nprint(f\"Roots of equation 5*x**2 + 5 * x + 3 = 0 is: x1 = {fa}, x2 = {sa}\")\n\n" }, { "alpha_fraction": 0.7030567526817322, "alphanum_fraction": 0.7030567526817322, "avg_line_length": 18.16666603088379, "blob_id": "2ab2a76bf1ca5ae3570e2f04308c1ae0ed09fc9b", "content_id": "5455c3bd482bd0c881a540d598168d5cd419023f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 229, "license_type": "no_license", "max_line_length": 46, "num_lines": 12, "path": "/bonus/lect5_http.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "#import requests as rq\n# import sys\n# import pprint\n# import json\n# print(sys.version)\n# url = \"https://jsonplaceholder.typicode.com\"\n# res = rq.get(url + \"/todos\")\n# print(res)\n# print(res.content)\n\nimport sys\nprint(sys.version)" }, { "alpha_fraction": 0.6981707215309143, "alphanum_fraction": 0.6981707215309143, "avg_line_length": 22.428571701049805, "blob_id": "7c4f49b4318975850d1ed0415d5ad85d65b29d7d", "content_id": "1df4dce18d8b7bec55c26464e322ba3e4a9820d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 328, "license_type": "no_license", "max_line_length": 47, "num_lines": 14, "path": "/ex23/ex23_rev.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "import sys\n#script, input_decoding, error = sys.argv\n\ndef bfile_create():\n\tfile_wbytestr = open('lang_bytes.txt')\n\tbinary_file = open(\"raw_bytes_lang.txt\", \"wb\")\n\tline = file_wbytestr.readline()\n\tif line:\n\t\tbinary_file.write(line)\n\t\treturn bfile_create()\n\tbynary_file.close()\n\tfile_wbytestr.close()\n\tprint('OK')\nbfile_create()\n" }, { "alpha_fraction": 0.5483871102333069, "alphanum_fraction": 0.6451612710952759, "avg_line_length": 14.5, 
"blob_id": "f799183c7f9dea5bc5caa92b058a2548e273a853", "content_id": "ebe436077eb67f98a007d80950fb92832af764e7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 62, "license_type": "no_license", "max_line_length": 15, "num_lines": 4, "path": "/ex18/checklist.md", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "[x] Exersise 1\n[ ] Exersise 20\n[x] Exersise 1\n[ ] Exersise 20\n" }, { "alpha_fraction": 0.6040268540382385, "alphanum_fraction": 0.6140939593315125, "avg_line_length": 17.625, "blob_id": "41d738459b59a440700f1c2ca666ae0c267f1822", "content_id": "6c78dc1c028937067d246f8b6d7a6388902daaff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 298, "license_type": "no_license", "max_line_length": 35, "num_lines": 16, "path": "/bonus/w3l2/classObj.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "class Parent:\n __private_attr = 1\n protected_attr = 2\n attr = 3\n\n\nclass Child(Parent):\n\n def get_parent_public(self):\n print(self.attr)\n\n def get_parent_protected(self):\n print(self._protected_attr)\n\n def get_parent_private(self):\n print(self.__private_attr)\n" }, { "alpha_fraction": 0.5860173106193542, "alphanum_fraction": 0.6056559085845947, "avg_line_length": 14.912500381469727, "blob_id": "da8a8bd7e39e76dc582acb1e9b5bfe41b6ff5316", "content_id": "394a409cd14dd5b668a52da8c878edba7c02827a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1273, "license_type": "no_license", "max_line_length": 65, "num_lines": 80, "path": "/ex22/conclusions_ex1-21.md", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "### EX22\n### Learned Pywords list\n\n * print\n * input\n * ==, >=, <=, !=\n * True, False\n```python\nprint(\"We have\",passengers,\"to carpool today\")\n```\n```python\nround(x) #rounds a number\n```\n```python\nprint(f\"Let's talk about {name}.\") #f-string\n```\n```python\nprint(\"328/30={0:0.2f}\".format(x)) #formtting for float numbers\n```\n ```python\n\"Isn't that joke so funny?! 
{}\".format(some_var)\n```\n```python\nprint(\".\"*10) \n```\n```python \nprint(end1 + end2 + end3 + end4 + end5 + end6, end=' ') \n```\n```python\nprint (\"\"\"\n Some\n Formatted\n Text\n\"\"\")\n```\n```python\ntaby_cat = \"\\tI'm tabbed in.\"\n```\n```python\nfrom sys import argv\nscript, name, surname, age = argv\n```\n```python\ntxt = open(filename)\nprint(txt.read())\ntxt.close()\n```\n```python\ntarget = open(filename,'w')\n```\n```python\nfrom os.path import exists\nexists(to_file)\n```\n```python\nwith open(from_file) as f:\n\tin_data = f.read()\n```\n```python\n#so many args\ndef print_two(*args):\n\targ1, arg2 = args\n\tprint(f\"arg1: {arg1}, arg2: {arg2}\")\n```\n```python\n#Rewind to begin of file\ndef rewind(f):\n\tf.seek(0)\n```\n```python\nf.readline() #read only one line from current position\n```\n```python\n# standart def form\ndef func(arg)\n b=arg\n return b\n```\n\n[Return to main repo page](https://github.com/Aersum/py-learning)\n" }, { "alpha_fraction": 0.7183462381362915, "alphanum_fraction": 0.7183462381362915, "avg_line_length": 26.64285659790039, "blob_id": "7569f231905f40d16fed53e566b7eeed5377dbbf", "content_id": "adf15c9533d0f71a8d70896644a2f4a7600559e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 387, "license_type": "no_license", "max_line_length": 51, "num_lines": 14, "path": "/ex15/ex15.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from sys import argv\n#pass argument from argv to file name\n#script, filename = argv\n#creating object \"txt\" of file passed to \"filename\"\n#txt = open(filename)\n#printing result of working method txt.open()\n#print(f\"Here's your file {filename}:\")\n#print(txt.read())\n#input\nfilename = input(\"Enter file name: \")\ntxt = open(filename)\nprint(\"Here's your file:\")\nprint(txt.read())\ntxt.close()\n" }, { "alpha_fraction": 0.7084673047065735, "alphanum_fraction": 0.7181136012077332, "avg_line_length": 30.100000381469727, "blob_id": "6796d5388e9cd96663d33b33fb079a07a5674227", "content_id": "c21f2264059392f204430f6cb6fc7dc3a4ac361f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 933, "license_type": "no_license", "max_line_length": 59, "num_lines": 30, "path": "/ex16/ex16.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from sys import argv\n#unpucking argv\nscript, filename = argv\n\nprint(f\"We're going to erase {filename}.\")\nprint(\"If you don't want that, hit CTRL+C.\")\nprint(\"If you do want that, hit any key.\")\n#stopping program until hiiting the key\ninput(\"?\")\n#recieving the object of file with filename in writing mode\nprint(\"Opening the file...\")\ntarget = open(filename,'w')\n#Erasing all informating in file\n#(this don't need when file opened with 'w' parameter)\nprint(\"Truncating the file. 
Goodbye!\")\n#target.truncate()\n\nprint(\"Now I'm going to ask you for three lines.\")\n#Asking user to input lines to recording to the file\nline1 = input(\"line 1: \")\nline2 = input(\"line 2: \")\nline3 = input(\"line 3: \")\n\nprint(\"I'm going to write these to the file.\")\n#writing lines to the file and separating them \n#with \\n - symbol of line breaking\ntarget.write(f\"{line1}\\n{line2}\\n{line3}\\n\")\n#closing the file\nprint(\"And finally we close it.\")\ntarget.close()\n" }, { "alpha_fraction": 0.6628440618515015, "alphanum_fraction": 0.6628440618515015, "avg_line_length": 28.066667556762695, "blob_id": "bb6730c663bc808cfc69748fa92a46983faa6328", "content_id": "fd137a10a4d6ad60ad84b7d2012644b04adfad01", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 436, "license_type": "no_license", "max_line_length": 50, "num_lines": 15, "path": "/ex17/ex17short.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from sys import argv\nfrom os.path import exists\nscript, from_file,to_file = argv\nprint(f\"Copying from {from_file} to {to_file}\")\n#in_file = open(from_file)\n#out_file = open(to_file,'w')\n#out_file.write(in_file.read())\n#out_file.close()\n#in_file.close()\n#*****************Another realization\nprint(f\"File {to_file} exists? {exists(to_file)}\")\nwith open(from_file) as f:\n\tin_data = f.read()\nwith open(to_file,'w') as f:\n\tf.write(in_data)\n" }, { "alpha_fraction": 0.7262499928474426, "alphanum_fraction": 0.7350000143051147, "avg_line_length": 23.24242401123047, "blob_id": "06da9ef301ed4d0dbc1efc45c38ecc5f3d1e35d0", "content_id": "4d9fcc52eda6dc049dd4a51169162f3c5690912d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 800, "license_type": "no_license", "max_line_length": 47, "num_lines": 33, "path": "/ex20/ex20.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from sys import argv\n#unpucking argv\nscrript, input_file = argv\n#Definition function that prints all file\ndef print_all(f):\n\tprint(f.read())\n#Rewind to begin of file\ndef rewind(f):\n\tf.seek(0)\n#Print one line from current position\ndef print_a_line(line_count, f):\n\tprint(line_count, f.readline())\n#Getting object of file\ncurrent_file = open(input_file)\n\nprint(\"First let's print the whole file:\\n\")\n#Passing object of file to function in argument\nprint_all(current_file)\n\nprint(\"Now let's rewind, kind of like a tape.\")\n\nrewind(current_file)\n\nprint(\"Let's print three lines:\")\n#line_count = 1\ncurrent_line = 1\nprint_a_line(current_line, current_file)\n#line_count = 2\ncurrent_line += 1\nprint_a_line(current_line, current_file)\n#line_count = 3\ncurrent_line += 1\nprint_a_line(current_line, current_file)\n" }, { "alpha_fraction": 0.5112782120704651, "alphanum_fraction": 0.6842105388641357, "avg_line_length": 32.25, "blob_id": "51fe0d94d625fa3b6612e334315ca4e123e5e4a9", "content_id": "6d927d2eccc3ea94a1529f26511d532b6a954c27", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 133, "license_type": "no_license", "max_line_length": 46, "num_lines": 4, "path": "/ex3/ex3-new.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "print(\"We have 328 pages of book and 30 days\")\nprint(\"I should read 328/30 pages a day\")\nx=328/30\nprint(\"328/30={0:0.2f}\".format(x))\n" }, { "alpha_fraction": 0.535444974899292, "alphanum_fraction": 0.6651583909988403, 
"avg_line_length": 26.625, "blob_id": "13079fba396894d2af74f37238235c72f7d68d1c", "content_id": "0eac3a6efc720ac32627447484f3df220424d818", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 663, "license_type": "no_license", "max_line_length": 57, "num_lines": 24, "path": "/ex3/ex3.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "#PE(M&D)(A&S)\nprint(\"I will now count my chicken\")\n#Deviding 30/6 and then adding 25printing result.\n#25*3 then remainder(modulus) of the division of 75 to 4.\n#75 devided by 4 with 3 remaining\nprint(\"Hens\",25.0+30.0/6.0)\nprint(\"Roosters\",100.0-25.0*3.0%4.0)\n\nprint(\"Now i Will count the eggs\")\n#3+21-5+reminder of 4%2=0-0.25+6\nprint(3.0+21.0-5.0+4.0%2.0-1.0/4.0+6.0)\n#5<-2 is false\nprint(\"It's true that 3+2<5-7?\")\n#prinnting result\nprint(3+2<5-7)\nprint(\"What is 3+2?\",3+2)\nprint(\"What is 5-7?\",5-7)\nprint(\"Oh that's why it's False\")\n\nprint(\"How about some more\")\n\nprint(\"Is it grater?\",5>2)\nprint(\"Is it grater or equal?\",5>=2)\nprint(\"Is it less or equal\",5<=2)\n" }, { "alpha_fraction": 0.6646586060523987, "alphanum_fraction": 0.676706850528717, "avg_line_length": 26.61111068725586, "blob_id": "16173612095d6e900c7f7a246d70b883eed97bae", "content_id": "a856edfdda5c732534a1612d8ce26e9abefbb22d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 498, "license_type": "no_license", "max_line_length": 67, "num_lines": 18, "path": "/ex17/guess_number.py", "repo_name": "Aersum/py-learning", "src_encoding": "UTF-8", "text": "from random import random\nfrom math import ceil\nnumb = ceil(random()*10)\nuser_numb = 0\nprint(\"Guess number from 0 to 10. input 'show' to show the answer\")\nwhile user_numb != numb:\n\tuser_numb = input(\">\")\n\ttry:\n\t\tuser_numb = int(user_numb)\n\t\tif user_numb == numb:\n\t\t\tprint(\"Guessed!\")\n\t\telif user_numb>numb:\n\t\t\tprint(f\"{user_numb} more then number to guessed\")\n\t\telif user_numb<numb:\n\t\t\tprint(f\"{user_numb} less then number to guessed\")\n\texcept ValueError:\n\t\tif user_numb == 'show':\n\t\t\tprint(numb)\n\n" } ]
27
Mariohz/Proyecciones
https://github.com/Mariohz/Proyecciones
6fe79bb7b89eb733a4dc76026be31007913246ed
d7cd9d2715977ab037c43a1c080a93ef5281b1c4
a2ddcf0ada34e87a78bd4e5e606804adec0a0510
refs/heads/master
2021-08-29T08:20:54.662436
2017-12-13T14:46:03
2017-12-13T14:46:03
114,132,528
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.46250683069229126, "alphanum_fraction": 0.5900382995605469, "avg_line_length": 24.02739715576172, "blob_id": "a0db02c54e9d2a8bb21170fb0fa37f0053cbb907", "content_id": "76a347470c52734a807596c3d3efff3fd6b15f80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1827, "license_type": "no_license", "max_line_length": 66, "num_lines": 73, "path": "/cubo.py", "repo_name": "Mariohz/Proyecciones", "src_encoding": "UTF-8", "text": "import pygame\nfrom math import pi\npygame.init()\npf=(100,10)#reales\n\ndef conversion(x,y,z): \n x=x+300\n if(y>=0):\n y=300-y\n else:\n y=-y+300\n ##punto incrementando z\n ##\n x=x+((pf[0]-x)*z/800)\n y=y+((pf[1]-y)*z/800)\n c=[x,y] \n return c\n\nBLACK = ( 0, 0, 0)\nWHITE = (255, 255, 255)\nBLUE = ( 0, 0, 255)\nGREEN = ( 0, 255, 0)\nRED = (255, 0, 0)\nsize = [600, 600]\nscreen = pygame.display.set_mode(size)\n\npygame.display.set_caption(\"Example code for the draw module\")\n\ndone = False\nclock = pygame.time.Clock()\nx=0\ny=0\nz=0\n#cubo\nv1=(-100,100,0)\nv2=(100,100,0)\nv3=(100,-100,0)\nv4=(-100,-100,0)\nv5=(-100,100,100)\nv6=(100,100,100)\nv7=(100,-100,100)\nv8=(-100,-100,100)\n#cubo\nwhile not done:\n # This limits the while loop to a max of 10 times per second.\n # Leave this out and we will use all CPU we can.\n clock.tick(24)\n \n for event in pygame.event.get(): # User did something\n if event.type == pygame.QUIT: # If user clicked close\n done=True # Flag that we are done so we exit this loop\n screen.fill(WHITE) \n #Cubo\n p1=(conversion(v1[0],v1[1],v1[2]))\n p2=(conversion(v2[0],v2[1],v2[2]))\n p3=(conversion(v3[0],v3[1],v3[2]))\n p4=(conversion(v4[0],v4[1],v4[2]))\n p5=(conversion(v5[0],v5[1],v5[2]))\n p6=(conversion(v6[0],v6[1],v6[2]))\n p7=(conversion(v7[0],v7[1],v7[2]))\n p8=(conversion(v8[0],v8[1],v8[2]))\n \n pygame.draw.polygon(screen,BLACK,[p5,p6,p7,p8],0)\n pygame.draw.polygon(screen,[10,10,10],[p2,p6,p7,p3],0)\n pygame.draw.polygon(screen,BLUE,[p8,p7,p3,p4],0)\n pygame.draw.polygon(screen,[50,50,50],[p1,p5,p8,p4],0)\n pygame.draw.polygon(screen,RED,[p1,p5,p6,p2],0)\n pygame.draw.polygon(screen,BLACK,[p1,p2,p3,p4],0)\n #Cubo \n \n pygame.display.flip()\n# Be IDLE friendly\npygame.quit()\n" }, { "alpha_fraction": 0.4192284643650055, "alphanum_fraction": 0.5361663699150085, "avg_line_length": 25.12598419189453, "blob_id": "7acb97a8caf2d026b87a07657111d10ce7a029ef", "content_id": "ddfa049fc8aea36d1eecd51cee2e0cc1b5bfa644", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3318, "license_type": "no_license", "max_line_length": 95, "num_lines": 127, "path": "/tablero.py", "repo_name": "Mariohz/Proyecciones", "src_encoding": "UTF-8", "text": "import pygame\nfrom math import pi\npygame.init()\npf=(300,300)#reales\n\ndef conversion(x,y,z): \n x=x+300\n if(y>=0):\n y=300-y\n else:\n y=-y+300\n ##punto incrementando z\n ##\n x=x+((pf[0]-x)*z/800)\n y=y+((pf[1]-y)*z/800)\n c=[x,y] \n return c\n#def trasladar(x,y)\n###\n# x=x+300\n# y=300-y\n###\nBLACK = ( 0, 0, 0)\nWHITE = (255, 255, 255)\nBLUE = ( 0, 0, 255)\nGREEN = ( 0, 255, 0)\nRED = (255, 0, 0)\nsize = [600, 600]\nscreen = pygame.display.set_mode(size)\n\npygame.display.set_caption(\"Example code for the draw module\")\n\ndone = False\nclock = 
pygame.time.Clock()\nx=0\ny=0\nz=0\no=(x,y,z)#(x,y,z)\n#pf=(300,50)\nc=[100,100]\nsupi=[0,0]\nsupd=[0,0]\nsupizq=[-100,100]\nsupder=[100,200]\n\n#cubo\n#v1=(-100,100,0)\n#v2=(100,100,0)\n#v3=(100,-100,0)\n#v4=(-100,-100,0)\n#v5=(-100,100,100)\n#v6=(100,100,100)\n#v7=(100,-100,100)\n#v8=(-100,-100,100)\n\n\n#cubo\nwhile not done:\n # This limits the while loop to a max of 10 times per second.\n # Leave this out and we will use all CPU we can.\n clock.tick(24)\n \n for event in pygame.event.get(): # User did something\n if event.type == pygame.QUIT: # If user clicked close\n done=True # Flag that we are done so we exit this loop\n screen.fill([10,10,10]) \n #Rectangulo\n #supi=conversion(supizq[0],supizq[1],z)\n #supd=conversion(supder[0],supder[1],z)\n #pygame.draw.rect(screen,BLACK,[supi[0],supi[1],abs(supi[0]-supd[0]),abs(supi[1]-supd[1])])\n #z=z+2\n #if z>=600:\n # z=0 \n #Rectangulo\n #pygame.display.flip()#volcar memoria al monitor.\n #Cubo\n #p1=(conversion(v1[0],v1[1],v1[2]))\n #p2=(conversion(v2[0],v2[1],v2[2]))\n #p3=(conversion(v3[0],v3[1],v3[2]))\n #p4=(conversion(v4[0],v4[1],v4[2]))\n #p5=(conversion(v5[0],v5[1],v5[2]))\n #p6=(conversion(v6[0],v6[1],v6[2]))\n #p7=(conversion(v7[0],v7[1],v7[2]))\n #p8=(conversion(v8[0],v8[1],v8[2]))\n \n #pygame.draw.polygon(screen,BLACK,[p5,p6,p7,p8],0)\n #pygame.draw.polygon(screen,BLACK,[p2,p6,p7,p3],0)\n #pygame.draw.polygon(screen,BLUE,[p1,p5,p8,p4],0)\n #pygame.draw.polygon(screen,BLACK,[p8,p7,p3,p4],0)\n #pygame.draw.polygon(screen,RED,[p1,p5,p6,p2],0)\n #pygame.draw.polygon(screen,BLACK,[p1,p2,p3,p4],0)\n #Cubo \n #Tablero\n x=-200\n y=-200\n z=0\n for j in range(0,8): \n for i in range(0,8):\n v1=(x,y,z)\n v2=(x,y,z+50)\n v3=(x+50,y,z+50)\n v4=(x+50,y,z)\n v5=(x,y-25,z)\n v6=(x+50,y-25,z) \n #QQQcara superior (esq inf izq, sup izq)\n p1=(conversion(v1[0],v1[1],v1[2])) \n p2=(conversion(v2[0],v2[1],v2[2]))\n p3=(conversion(v3[0],v3[1],v3[2]))\n p4=(conversion(v4[0],v4[1],v4[2]))\n p5=(conversion(v5[0],v5[1],v5[2]))\n p6=(conversion(v6[0],v6[1],v6[2])) \n #QQQ\n #pygame.draw.polygon(screen,BLACK,[p1,p4,p5,p6],0) \n if (i+j)%2==0:\n pygame.draw.polygon(screen,BLACK,[p1,p2,p3,p4],0)\n else:\n pygame.draw.polygon(screen,WHITE,[p1,p2,p3,p4],0)\n if j<1:\n pygame.draw.polygon(screen,[230,230,230],[p1,p4,p6,p5],0) \n x=x+50\n x=-200\n z=z+50\n #Tablero\n\n pygame.display.flip()\n# Be IDLE friendly\npygame.quit()\n" }, { "alpha_fraction": 0.4980016052722931, "alphanum_fraction": 0.5435651540756226, "avg_line_length": 20.929824829101562, "blob_id": "2ed1c2985f4af2d9a81d0e3ff5a9b9a908161829", "content_id": "8d786fa0ae486c4059320bbc57f2bb1a0b9c25a8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1251, "license_type": "no_license", "max_line_length": 66, "num_lines": 57, "path": "/circuloRebota.py", "repo_name": "Mariohz/Proyecciones", "src_encoding": "UTF-8", "text": "import pygame\nfrom math import pi\npygame.init()\nBLACK = ( 0, 0, 0)\nWHITE = (255, 255, 255)\nBLUE = ( 0, 0, 255)\nGREEN = ( 0, 255, 0)\nRED = (255, 0, 0)\nsize = [300, 300]\nscreen = pygame.display.set_mode(size)\npygame.display.set_caption(\"Example code for the draw module\")\n\ndone = False\nclock = pygame.time.Clock()\nx=5\ny=5\nradio=10\nflag=0\nflagx=0\nwhile not done:\n\n # This limits the while loop to a max of 10 times per second.\n # Leave this out and we will use all CPU we can.\n clock.tick(10)\n\n for event in pygame.event.get(): # User did something\n if event.type == pygame.QUIT: # If user clicked close\n 
done=True # Flag that we are done so we exit this loop\n screen.fill(WHITE)\n\n ######\n pygame.draw.circle(screen, BLUE, [x, y], radio)\n if flag==0:\n if y+radio<size[1]:\n y=y+5\n else:\n flag=1 \n else:\n if y-radio>0:\n y=y-5\n else:\n flag=0\n if flagx==0:\n if x+radio<size[0]:\n x=x+1\n else:\n flagx=1\n else:\n if x-radio>0:\n x=x-1\n else:\n flagx=0\n #####\n pygame.display.flip()#volcar memoria al monitor.\n\n# Be IDLE friendly\npygame.quit()\n\n" }, { "alpha_fraction": 0.5177358388900757, "alphanum_fraction": 0.5932075381278992, "avg_line_length": 21.457626342773438, "blob_id": "9a3ffcd755ded9bcededd93b463d459cd20f7828", "content_id": "449d81c445c679689cefaea7316b850422bdb5bd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1325, "license_type": "no_license", "max_line_length": 94, "num_lines": 59, "path": "/rectangulo.py", "repo_name": "Mariohz/Proyecciones", "src_encoding": "UTF-8", "text": "import pygame\nfrom math import pi\npygame.init()\npf=(300,10)#reales\n\ndef conversion(x,y,z): \n x=x+300\n if(y>=0):\n y=300-y\n else:\n y=-y+300\n ##punto incrementando z\n ##\n x=x+((pf[0]-x)*z/300)\n y=y+((pf[1]-y)*z/300)\n c=[x,y] \n return c\nBLACK = ( 0, 0, 0)\nWHITE = (255, 255, 255)\nBLUE = ( 0, 0, 255)\nGREEN = ( 0, 255, 0)\nRED = (255, 0, 0)\nsize = [600, 600]\nscreen = pygame.display.set_mode(size)\n\npygame.display.set_caption(\"Example code for the draw module\")\n\ndone = False\nclock = pygame.time.Clock()\nx=0\ny=0\nz=0\nc=[100,100]\nsupi=[0,0]\nsupd=[0,0]\nsupizq=[-100,100]\nsupder=[100,200]\n\nwhile not done:\n # This limits the while loop to a max of 10 times per second.\n # Leave this out and we will use all CPU we can.\n clock.tick(24)\n \n for event in pygame.event.get(): # User did something\n if event.type == pygame.QUIT: # If user clicked close\n done=True # Flag that we are done so we exit this loop\n screen.fill(WHITE) \n #Rectangulo\n supi=conversion(supizq[0],supizq[1],z)\n supd=conversion(supder[0],supder[1],z)\n pygame.draw.rect(screen,BLACK,[supi[0],supi[1],abs(supi[0]-supd[0]),abs(supi[1]-supd[1])])\n z=z+2\n if z>=300:\n z=0 \n #Rectangulo\n\n pygame.display.flip()\n# Be IDLE friendly\npygame.quit()\n" } ]
4
scallop722/mylib
https://github.com/scallop722/mylib
1e4b146fd5d141d4620f0068d9f2d38087cfa459
a1327d0b3b4f33ca769d5bc3fd11ee070f9a47fe
79c1ebb820fea71ed410eca78137e286d7c5d476
refs/heads/main
2023-07-09T12:09:17.310777
2021-08-11T07:42:21
2021-08-11T07:42:21
394,122,067
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7025440335273743, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 17.962963104248047, "blob_id": "20844e0c95a485bac4cd554eeafec134d319fe20", "content_id": "4c2a94efeae60a2603e68a89c1e669c0162dd879", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 541, "license_type": "no_license", "max_line_length": 61, "num_lines": 27, "path": "/setting.py", "repo_name": "scallop722/mylib", "src_encoding": "UTF-8", "text": "from sqlalchemy import *\nfrom sqlalchemy.orm import *\nfrom sqlalchemy.ext.declarative import declarative_base\nimport psycopg2\n\n# postgresqlのDBの設定\nDATABASE = \"postgresql://mylib:hogehoge@localhost:5432/mylib\"\n\n# Engineの作成\nENGINE = create_engine(\n DATABASE,\n encoding=\"utf-8\",\n echo=True\n)\n\n# Sessionの作成\nsession = scoped_session(\n sessionmaker(\n autocommit=False,\n autoflush=False,\n bind=ENGINE\n )\n)\n\n# modelで使用する\nBase = declarative_base()\nBase.query = session.query_property()" }, { "alpha_fraction": 0.6575505137443542, "alphanum_fraction": 0.6623067855834961, "avg_line_length": 26.161291122436523, "blob_id": "806e326146c9946c90670cc74cceb406dc14d094", "content_id": "dadf64a9dcfaf4c534913d9d5996920d13d1e2b5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 855, "license_type": "no_license", "max_line_length": 86, "num_lines": 31, "path": "/entity/Book.py", "repo_name": "scallop722/mylib", "src_encoding": "UTF-8", "text": "from typing import List\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy import Column, Integer, String, DateTime\nfrom sqlalchemy.orm import relationship\nfrom setting import Base\nfrom setting import ENGINE\n\nclass Book(Base):\n \"\"\"\n Book\n \"\"\"\n\n __tablename__ = 'book'\n id = Column(Integer, primary_key=True, autoincrement=True)\n lending_histories = relationship(\"LendingHistory\", backref=\"book\")\n title = Column(String(30))\n author = Column(String(30))\n note = Column(String)\n\n def __init__(self, name):\n self.name = name\n\n def get_planed_return_date(self):\n if ([history for history in self.lending_histories if not history.returned ]):\n return \"貸出中\"\n else:\n return \"返却済み\"\n\n\nif __name__ == \"__main__\":\n Base.metadata.create_all(bind=ENGINE)" }, { "alpha_fraction": 0.7008928656578064, "alphanum_fraction": 0.7038690447807312, "avg_line_length": 27.04166603088379, "blob_id": "11d94e67ea262f3a220ef7e5cc6bb1377666da15", "content_id": "51039d86289541902e5d03e257856e6b0bf6c742", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 672, "license_type": "no_license", "max_line_length": 62, "num_lines": 24, "path": "/entity/Member.py", "repo_name": "scallop722/mylib", "src_encoding": "UTF-8", "text": "from datetime import datetime\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy import Column, Integer, String, DateTime\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.sql.sqltypes import Boolean\nfrom setting import Base\nfrom setting import ENGINE\n\nclass Member(Base):\n \"\"\"\n Member\n \"\"\"\n\n __tablename__ = 'member'\n id = Column(Integer, primary_key=True, autoincrement=True)\n member = relationship(\"LendingHistory\", backref=\"member\")\n name = Column(String(20))\n disabled = Column(Boolean)\n \n def __init__(self, name):\n self.name = name\n\nif __name__ == \"__main__\":\n Base.metadata.create_all(bind=ENGINE)" }, { "alpha_fraction": 
0.6740858554840088, "alphanum_fraction": 0.6740858554840088, "avg_line_length": 23.230770111083984, "blob_id": "6d3495ca53353563e1ca66a6af2c00a664118bc8", "content_id": "d7fe8df934fc357a61e05bf82768c76d9f08b458", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 649, "license_type": "no_license", "max_line_length": 57, "num_lines": 26, "path": "/app.py", "repo_name": "scallop722/mylib", "src_encoding": "UTF-8", "text": "from entity.Book import Book\nfrom entity.LendingHistory import LendingHistory\nfrom flask import Flask, render_template, redirect\nfrom flask.helpers import url_for\nfrom setting import session\n\napp = Flask(__name__)\n\[email protected](\"/book/list\", methods=[\"GET\"])\ndef list():\n \"\"\"\n 図書一覧\n \"\"\"\n books = session.query(Book).all()\n return render_template(\"book/list.html\", books=books)\n\[email protected](\"/member/create\", methods=[\"GET\"])\ndef entry():\n \"\"\"\n ユーザー登録\n \"\"\"\n return render_template(\"member/entry.html\")\n\[email protected](\"/member/create\", methods=[\"POST\"])\ndef create():\n return redirect(url_for(\"/book/list\"))" }, { "alpha_fraction": 0.7203841805458069, "alphanum_fraction": 0.7267876267433167, "avg_line_length": 15.086206436157227, "blob_id": "c44a4b246857c95c092591164a35709b9e6ee25d", "content_id": "7435a5a8979993da0da2dbdf65d0cc2a6bbfba87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 937, "license_type": "no_license", "max_line_length": 43, "num_lines": 58, "path": "/db/mylib.sql", "repo_name": "scallop722/mylib", "src_encoding": "UTF-8", "text": "\n/* Drop Tables */\n\nDROP TABLE IF EXISTS lending_history;\nDROP TABLE IF EXISTS book;\nDROP TABLE IF EXISTS member;\n\n\n\n\n/* Create Tables */\n\nCREATE TABLE book\n(\n\tid bigserial NOT NULL,\n\ttitle varchar(30) NOT NULL,\n\tauthor varchar(30) NOT NULL,\n\tnote text,\n\tPRIMARY KEY (id)\n) WITHOUT OIDS;\n\n\nCREATE TABLE lending_history\n(\n\tid bigserial NOT NULL,\n\tbook_id bigint NOT NULL,\n\tmember_id bigint NOT NULL,\n\tlending_date date NOT NULL,\n\treturned boolean DEFAULT 'false' NOT NULL,\n\tPRIMARY KEY (id)\n) WITHOUT OIDS;\n\n\nCREATE TABLE member\n(\n\tid bigserial NOT NULL,\n\tname varchar(20) NOT NULL,\n\tdisabled boolean DEFAULT 'false' NOT NULL,\n\tPRIMARY KEY (id)\n) WITHOUT OIDS;\n\n\n\n/* Create Foreign Keys */\n\nALTER TABLE lending_history\n\tADD FOREIGN KEY (book_id)\n\tREFERENCES book (id)\n\tON UPDATE RESTRICT\n\tON DELETE RESTRICT\n;\n\n\nALTER TABLE lending_history\n\tADD FOREIGN KEY (member_id)\n\tREFERENCES member (id)\n\tON UPDATE RESTRICT\n\tON DELETE RESTRICT\n;\n\n\n\n" }, { "alpha_fraction": 0.7077562212944031, "alphanum_fraction": 0.7077562212944031, "avg_line_length": 29.125, "blob_id": "b4e13debeadc3a93fbaf1e8b745df48acd90971a", "content_id": "2317d593309df9efedc89ebb3b2af8476706915d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 722, "license_type": "no_license", "max_line_length": 62, "num_lines": 24, "path": "/entity/LendingHistory.py", "repo_name": "scallop722/mylib", "src_encoding": "UTF-8", "text": "from sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy import Column, Integer, String, DateTime\nfrom sqlalchemy.sql.schema import ForeignKey\nfrom sqlalchemy.sql.sqltypes import Boolean\nfrom setting import Base\nfrom setting import ENGINE\n\nclass LendingHistory(Base):\n \"\"\"\n LendingHistory\n \"\"\"\n\n __tablename__ = 
'lending_history'\n id = Column(Integer, primary_key=True, autoincrement=True)\n book_id = Column(Integer, ForeignKey('book.id'))\n member_id = Column(Integer, ForeignKey('member.id'))\n lending_date = Column(DateTime)\n returned = Column(Boolean)\n\n def __init__(self, name):\n self.name = name\n\nif __name__ == \"__main__\":\n Base.metadata.create_all(bind=ENGINE)" } ]
6
Th3R3p0/fluent-python
https://github.com/Th3R3p0/fluent-python
91fa5ad7fc647c7041320d1c63e7372cea035438
5346d04be0360643c83204d6d7c5d7d0c031dfdf
fd097014b1d1695df7e535e7e10156ac699efed5
refs/heads/master
2021-01-22T18:57:43.082566
2017-08-10T02:36:53
2017-08-10T02:36:53
85,137,480
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6973415017127991, "alphanum_fraction": 0.7177914381027222, "avg_line_length": 39.75, "blob_id": "76ca7e94873f57a939eed395b9fa9d06608bc008", "content_id": "8763878f095174613e8327012fa5bfee38e4420b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 490, "license_type": "no_license", "max_line_length": 114, "num_lines": 12, "path": "/chap4/bytes.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "cafe = bytes('café', encoding='utf_8')\nprint(cafe)\nprint(cafe[0])\nprint(cafe[:1])\ncafe_arr = bytearray(cafe)\nprint(cafe_arr)\nprint(cafe_arr[-1:])\n\nprint(bytes.fromhex('31 4B CE A9'))\n# for bytes in the printable ascii range (from space to ~) the ASCCI characters themeselves are used\n# for bytes corresponding to tab, newline, cariage return and \\, the escape sequences are used (\\t, \\n, \\r and \\\\)\n# for every other byte, the hexadecimal escape sequence is used. for ex a null byte: \\x00\n" }, { "alpha_fraction": 0.6552315354347229, "alphanum_fraction": 0.6552315354347229, "avg_line_length": 22.239999771118164, "blob_id": "4aa1404f4dc8692e074f3c39e99375947c3c2495", "content_id": "46cdb11f85f013c2d904c89ef7a2d982de93b9a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 583, "license_type": "no_license", "max_line_length": 96, "num_lines": 25, "path": "/chap14/sentence_gen.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "import re\nimport reprlib\n\nRE_WORD = re.compile('\\w+')\n\nclass Sentence:\n\n def __init__(self, text):\n self.text = text\n self.words = RE_WORD.findall(text)\n\n def __repr__(self):\n return 'Sentence({})'.format(reprlib.repr(self.text))\n\n def __iter__(self):\n for word in self.words:\n yield word\n # this return is not needed\n # there is no need to catch an exception. The generator function doesn't raise StopIteration\n # it simply exits when it's done producing values\n return\n\ns = Sentence(\"Hello again world. It's rainy today\")\nfor i in s:\n print(i)\n\n\n" }, { "alpha_fraction": 0.7990196347236633, "alphanum_fraction": 0.7990196347236633, "avg_line_length": 67, "blob_id": "24424d394e1ca58459812b7d49bc6e2c1ab9913e", "content_id": "7a86dad81438fc9da2337a82f0638c11e2ec5ffe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 204, "license_type": "no_license", "max_line_length": 185, "num_lines": 3, "path": "/readme.md", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "## Fluent Python\n\nI am currently working my way through Fluent Python to introduce myself to some concepts of Python which I have never used. 
This repository will be updated with the code from this book.\n" }, { "alpha_fraction": 0.5341841578483582, "alphanum_fraction": 0.6499544382095337, "avg_line_length": 20.076923370361328, "blob_id": "3eeb79ace8edc81c0f32994c75563bdefbf472dc", "content_id": "8244b33854e45492d435c435f0c7d7d59cd93e8f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1097, "license_type": "no_license", "max_line_length": 119, "num_lines": 52, "path": "/chap2/slices.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "l = [10, 20, 30, 40, 50, 60]\nprint(l[:2])\n\n\n# slicing: [start:stop:step]\ns = 'bicycle'\n# slice x steps\nprint(s[::3])\n# slice reverse\nprint(s[::-1])\n\nprint()\ninvoice = \"\"\"\n123456789012345678901234567890123456789012\n1909 Book 1 $9.99 3 $29.97\n1910 Book 2 $9.99 1 $9.99\n\"\"\"\nSKU = slice(0, 6)\nDESCRIPTION = slice(6, 20)\nUNIT_PRICE = slice(20, 30)\nQUANTITY = slice(30,34)\nITEM_TOTAL = slice(44, None)\nline_items = invoice.split('\\n')[2:]\nfor item in line_items:\n print(item[UNIT_PRICE], item[DESCRIPTION])\n\n# assigning to slices\nl = list(range(10))\nprint(l)\nl[2:5] = [20, 30]\nprint(l)\ndel l[5:7]\nprint(l)\n\n# the right side must be iterable even if it is just one item\nl[2:4] = [100]\n\nprint()\n# beware of using a*n when a is a sequence containing mutable items\nlist1 = [['a']] * 3\nprint(list1)\n# changing item zero in the array modifies all items because they are references to the same item rather than their own\n# unique items\nlist1[0][0]='b'\nprint(list1)\nprint()\n\n# correct way to do this\nlist2 = [['a'] for i in range(3)]\nprint(list2)\nlist2[0][0]='b'\nprint(list2)\n\n" }, { "alpha_fraction": 0.6261342763900757, "alphanum_fraction": 0.6606170535087585, "avg_line_length": 15.666666984558105, "blob_id": "778ea6e9930ced16ce04f1758eb9ed1ad2cb285c", "content_id": "77ad5c0e9bd58afe38f41de96d9a839e1f4fe2fe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 551, "license_type": "no_license", "max_line_length": 74, "num_lines": 33, "path": "/chap14/how_gen_funcs_work.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "# any function that contains the yield keyword is a generator function\n# usually the body of a generator function has a loop, but not necessarily\n\n# generator fuctions `yield` or `produce` values\n# other functions `return` values\n\ndef gen_123():\n yield 1\n yield 2\n yield 3\n yield 4\n\nprint(gen_123)\nprint(gen_123())\n\nfor i in gen_123():\n print(i)\n\ng = gen_123()\nprint(g)\nprint(next(g))\nprint(next(g))\nprint(next(g))\n\ndef gen_AB():\n print('start')\n yield 'A'\n print('continue')\n yield 'B'\n print('end.')\n\nfor c in gen_AB():\n print('-->', c)\n\n" }, { "alpha_fraction": 0.6321607828140259, "alphanum_fraction": 0.6562814116477966, "avg_line_length": 21.11111068725586, "blob_id": "c2156ca0db13660794e047ea05d77caf3b9b5bfc", "content_id": "13861adbbee650d20180bc412c73b8e3bf6c95ad", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 995, "license_type": "no_license", "max_line_length": 53, "num_lines": 45, "path": "/chap14/arith_prog.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "class ArithmeticProgression:\n\n def __init__(self, begin, step, end=None):\n self.begin = begin\n self.step = step\n self.end = end\n\n def __iter__(self):\n result = type(self.begin + self.step)(self.begin)\n forever 
= self.end is None\n index = 0\n while forever or result < self.end:\n yield result\n index += 1\n result = self.begin + self.step * index\n\n\ndef aritprog_gen(begin, step, end=None):\n result = type(begin+step)(begin)\n forever = end is None\n index = 0\n while forever or result < end:\n yield result\n index += 1\n result = begin + step * index\n\n\nap = ArithmeticProgression(0,1,3)\nprint(list(ap))\n\nap = ArithmeticProgression(1, .5, 3)\nprint(list(ap))\n\nap = ArithmeticProgression(0, 1/3, 1)\nprint(list(ap))\n\nfrom fractions import Fraction\nap = ArithmeticProgression(0, Fraction(1,3), 1)\nprint(list(ap))\n\nfrom decimal import Decimal\nap = ArithmeticProgression(0, Decimal('.1'), .3)\nprint(list(ap))\n\nprint(list(aritprog_gen(0, Decimal('.1'), .3)))\n" }, { "alpha_fraction": 0.6789366006851196, "alphanum_fraction": 0.6830266118049622, "avg_line_length": 26.16666603088379, "blob_id": "adcc3bcce462b67a9e686aec5e25ca3b979c90bb", "content_id": "9f9baa97b4c7d2d3cf05c8727e5679b53b5c2917", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 489, "license_type": "no_license", "max_line_length": 69, "num_lines": 18, "path": "/chap3/sets.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "# a set is a collection of unique objects\nhaystack = set('a b c d e f g h i j k'.split())\nneedles = set('a d z'.split())\n\n# sets allows returning unions, intersections and differences\nfound = len(needles & haystack)\nprint(found)\n\n# if you didn't use sets it would look something like this:\nfound=0\nfor n in needles:\n if n in haystack:\n found += 1\nprint(found)\n\n# however, there is a cost in generating the needle and haystack sets\n# you can also define a set\ns = {'a', 'b', 'c'}\n" }, { "alpha_fraction": 0.6697247624397278, "alphanum_fraction": 0.6926605701446533, "avg_line_length": 30.14285659790039, "blob_id": "c93a2ea430b4e8d90301f40b69e1a18877f2c403", "content_id": "45c3690876c92dc2f79628ea912439adaf0bfe20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 221, "license_type": "no_license", "max_line_length": 103, "num_lines": 7, "path": "/chap4/characters.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "# when as a string, \"café\" has 4 unicode characters\n# However, when encoded with utf-8, it has 5 bytes because the é is stored in 2 bytes as opposed to one\n\nb = 'café'\nprint(len(b))\nb = b.encode('utf-8')\nprint(len(b))\n" }, { "alpha_fraction": 0.6691449880599976, "alphanum_fraction": 0.6802973747253418, "avg_line_length": 25.899999618530273, "blob_id": "5b0e0b48166706124754236bd6004c06dd1ccf1e", "content_id": "6efe12008ee5a5e2a94e85246c73a9557c4a9044", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 269, "license_type": "no_license", "max_line_length": 59, "num_lines": 10, "path": "/chap14/aritprog_v3.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "import itertools\n\ndef aritprog_gen(begin, step, end=None):\n first = type(begin + step)(begin)\n ap_gen = itertools.count(first, step)\n if end is not None:\n ap_gen = itertools. 
takewhile(lambda n: n< end, ap_gen)\n return ap_gen\n\nprint(list(aritprog_gen(0, 1, 5)))\n" }, { "alpha_fraction": 0.7301587462425232, "alphanum_fraction": 0.7460317611694336, "avg_line_length": 30.5, "blob_id": "2380701f5554545ab26fa4a40bc95141eab1c7fb", "content_id": "eceaf13c45941463c8d2b36b3ccab6be6ab4f014", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 189, "license_type": "no_license", "max_line_length": 87, "num_lines": 6, "path": "/chap14/takewhile_example.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "import itertools\n\ngen = itertools.takewhile(lambda n: n<3, itertools.count(1, .5))\nprint(list(gen))\n\n# if you were to not use takewhile, python would try to build a list of endless counter\n" }, { "alpha_fraction": 0.6781609058380127, "alphanum_fraction": 0.704023003578186, "avg_line_length": 22.133333206176758, "blob_id": "59a20e2d9f3daea927b989c8128665b74b63a3e2", "content_id": "90bd32e5352e7730136246d00dce2de6754fb6e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 348, "license_type": "no_license", "max_line_length": 71, "num_lines": 15, "path": "/chap5/function_as_an_object.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "def factorial(n):\n '''returns n!'''\n return 1 if n<2 else n*factorial(n-1)\n\nprint(factorial(5))\nprint(factorial.__doc__)\nprint(type(factorial))\n\nprint()\n# use a function through a different name and pass function as argument\nfact = factorial\nprint(fact)\nprint(fact(5))\nprint(map(factorial, range(11)))\nprint(list(map(factorial, range(11))))\n\n" }, { "alpha_fraction": 0.7250000238418579, "alphanum_fraction": 0.7315789461135864, "avg_line_length": 33.54545593261719, "blob_id": "074bbf204fd4401810e6c39fe92c2d52ef92baba", "content_id": "608ead5c4295f88310103b6425d4455b7ca3dca9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 760, "license_type": "no_license", "max_line_length": 119, "num_lines": 22, "path": "/chap2/listcomps_vs_genexps.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "symbols = \"$%^&@\"\n\n# listcomp example\nlistcomp = [ord(symbol) for symbol in symbols]\nprint(listcomp)\n\n# genexp example\ngenexp = tuple(ord(symbol) for symbol in symbols)\nprint(genexp)\n\n# \"genexps yields items one by one using the iterator protocol instead of building a whole list just to\n# feed another constructor\"\n\n# use a generator expression if all you're doing is iterating once. 
If you want to store and use the generated results,\n# then you're probably better off with a list comprehension.\n# http://stackoverflow.com/questions/47789/generator-expressions-vs-list-comprehension\n\n# this is using a generator expression\ncolors = ['black', 'white']\nsizes = ['S', 'M', 'L']\nfor tshirt in ('%s %s' % (c, s) for c in colors for s in sizes):\n print(tshirt)\n" }, { "alpha_fraction": 0.6693744659423828, "alphanum_fraction": 0.6718115210533142, "avg_line_length": 24.625, "blob_id": "d404484060be47cbf0c0e25fb2f958e5c42f4fe3", "content_id": "dc70579d760b16728b947b3e45b22bd017fa30cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1231, "license_type": "no_license", "max_line_length": 116, "num_lines": 48, "path": "/chap14/sentence.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "import re\nimport reprlib\n\nRE_WORD = re.compile('\\w+')\n\nclass Sentence:\n\n def __init__(self, text):\n self.text = text\n self.words = RE_WORD.findall(text)\n\n def __getitem__(self, index):\n return self.words[index]\n\n def __len__(self):\n return len(self.words)\n\n def __repr__(self):\n return 'Sentence({})'.format(reprlib.repr(self.text))\n\ns = Sentence('\"I wonder how this sentence class will work\", said the student.')\nprint(s)\nprint(len(s))\n\nfor i in s:\n print(i)\n\n# The __iter__ dunder function is not defined, however this funtion is iterable because the __getitem__ is defined\n# Thus, all sequences (str, unicode, list, tuple, buffer, xrange) are iterable because they all have the __getitem__\n# function implemented\nprint(list(s))\n\ns3 = Sentence('Pig and Pepper')\nit = iter(s3)\nprint(it)\n\n# next pops the zero index from the list\nprint(next(it))\nprint(next(it))\nprint(next(it))\n# the next in the next line would through a StopIteration exception because there is no zero index\n# next(it)\n\nit = iter(s3)\nprint(next(it))\nprint(list(iter(it)))\n# the next in the next line would through a StopIteration exception - ??? 
because it has run through all indexes ???\n# print(next(it))\n\n" }, { "alpha_fraction": 0.6928104758262634, "alphanum_fraction": 0.7189542651176453, "avg_line_length": 27.91891860961914, "blob_id": "8163ef92ac5c290a2d8061ef5d8daed3ec3301ba", "content_id": "93fff552750031e3aa48d4d16e1b4d9735e44bb4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1071, "license_type": "no_license", "max_line_length": 92, "num_lines": 37, "path": "/chap14/gen_mapping_examples.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "# mapping generator functions\n\nsample = [5, 4, 2, 8, 7, 6, 3, 0, 9, 1]\n\nimport itertools\n\nprint('accumulate')\n# accumulate(it, [func])\n# yields accumulated sums\nprint(list(itertools.accumulate(sample)))\n# yields the minimum item\nprint('accumulate - min')\nprint(list(itertools.accumulate(sample, min)))\n# yields the maximum item\nprint('accumulate - max')\nprint(list(itertools.accumulate(sample, max)))\n\nimport operator\nprint('accumulate - mul')\n# yields the multiplied values\nprint(list(itertools.accumulate(sample, operator.mul)))\nprint('accumulate - mul - range')\nprint(list(itertools.accumulate(range(1,11), operator.mul)))\n\nprint('enumerate')\n# enumerate(iterable, start=0)\nprint(list(enumerate('albatroz', 1)))\n\nprint('map')\n# map(func, it1, [it2, ..., itN])\nprint(list(map(operator.mul, range(11), range(11))))\nprint(list(map(operator.mul, range(11), [2, 4, 8])))\n\nprint('starmap')\n# starmap(func, it)\nprint(list(itertools.starmap(operator.mul, enumerate('abatroz', 1))))\nprint(list(itertools.starmap(lambda a, b: b/a, enumerate(itertools.accumulate(sample), 1))))\n\n" }, { "alpha_fraction": 0.6928446888923645, "alphanum_fraction": 0.7190226912498474, "avg_line_length": 29.157894134521484, "blob_id": "e82cabd65133dad5ba61818a5a51dc398696d168", "content_id": "337a9aa478f474f8ef3bf830b4bb81183a660bf5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 573, "license_type": "no_license", "max_line_length": 98, "num_lines": 19, "path": "/chap5/replacements_4_map_filter_reduce.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "from functools import reduce\nfrom operator import add\n\ndef factorial(n):\n '''returns n!'''\n return 1 if n<2 else n*factorial(n-1)\n\nfact = factorial\n\n# map and filter functions vs list comprehensions and generator expressions\nprint(list(map(fact, range(6))))\nprint([fact(n) for n in range(6)])\nprint(list(map(factorial, filter(lambda n: n % 2, range(6)))))\nprint([factorial(n) for n in range(6) if n % 2])\n\n# reduce function vs sum function - readability and performance are better when using sum function\nprint()\nprint(reduce(add, range(100)))\nprint(sum(range(100)))\n" }, { "alpha_fraction": 0.686274528503418, "alphanum_fraction": 0.7098039388656616, "avg_line_length": 22.18181800842285, "blob_id": "5ba10227fd3d2ad5674e3fb54fbccccc1318bafd", "content_id": "414a05533f516d7d3215a270856c2f6909ceee76", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 255, "license_type": "no_license", "max_line_length": 110, "num_lines": 11, "path": "/chap2/keepsorted.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "import bisect\n\na = [2, 5, 3, 8, 4]\nprint(a)\na.sort()\nprint(a)\n\n# sorting can be an expensive. 
rather than appending a new item to the end of the list and then sorting again,\n# you can use bisect.insort to keep the list sorted\nbisect.insort(a, 4)\nprint(a)\n" }, { "alpha_fraction": 0.731653094291687, "alphanum_fraction": 0.745737612247467, "avg_line_length": 31.095237731933594, "blob_id": "7b667ade0413ea8e4d41e5838ddd5c9a7f4a8c71", "content_id": "2f366ea2f8176d6d8bedfdbf92881067c506a143", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1349, "license_type": "no_license", "max_line_length": 87, "num_lines": 42, "path": "/chap14/gen_filter_examples.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "# filtering generator functions\n\na = 'Aardvark'\n\n# returns True or False if a characters is a vowel\ndef vowel(c):\n return c.lower() in 'aeiou'\n\nprint('filter')\n# filter(predicate, it)\n# will only display characters that return True from the vowel function \nprint(list(filter(vowel, a)))\n\nimport itertools\nprint('filterfalse')\n# filterfalse(predicate, it)\n# will only yield characters that return False from the vowel function\nprint(list(itertools.filterfalse(vowel, a)))\n\nprint('dropwhile')\n# dropwhile(predicate, it)\n# will yield all items after one false value is matched\nprint(list(itertools.dropwhile(vowel, a)))\n\nprint('takewhile')\n# takewhile(predicate, it)\n# will yield only items that are True and drop the rest after a False is matched\nprint(list(itertools.takewhile(vowel, a)))\n\nprint('compress')\n# compress(it, selector_it)\n# consumes two iterables in parallel; will yield items from it when selector_it is True\nprint(list(itertools.compress(a, (1,0,1,1,0,1))))\n\nprint('islice')\n# islice takes (iterator, stop) or (iterator, stop, stop, step=1)\n# the following will top after 4 characters\nprint(list(itertools.islice(a, 4)))\n# the following will start at 4 index and stop at 7 index\nprint(list(itertools.islice(a, 4, 7)))\n# the following will start at 1 index and stop at 7 index stepping by 2\nprint(list(itertools.islice(a, 1, 7, 2)))\n\n" }, { "alpha_fraction": 0.6877862811088562, "alphanum_fraction": 0.6893129944801331, "avg_line_length": 26.29166603088379, "blob_id": "e182c485b41de907500fb3417a8793f9a7d71827", "content_id": "38143dbee2c2f2ecad549b4cc9d4a63aa58ca44b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1310, "license_type": "no_license", "max_line_length": 110, "num_lines": 48, "path": "/chap14/sentence_iter.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "import re\nimport reprlib\n\nRE_WORD = re.compile('\\w+')\n\n# iterables have an __iter__ method that instatiates a new iterator every time\n# Iterators implement a __next__ method that returns individual items and an __iter__ method that returns self\n# iterators are also iterable, but iterables are nto iterators\n\n# to support multiple traversals it must be possible to obtain multiple indepenced interators from the same\n# iterable instance, and each iterator must keep its own internal state.\n# therefore, do not implement __next__ on an iterable\n\nclass Sentence:\n\n def __init__(self, text):\n self.text = text\n self.words = RE_WORD.findall(text)\n\n def __repr__(self):\n return 'Sentence({})'.format(reprlib.repr(self.text))\n\n def __iter__(self):\n return SentenceIterator(self.words)\n\n\nclass SentenceIterator:\n\n def __init__(self, words):\n self.words = words\n self.index = 0\n\n def __next__(self):\n try:\n word 
= self.words[self.index]\n except IndexError:\n raise StopIteration()\n self.index += 1\n return word\n\n # it is not necessary for __iter__ to be defined for this to work\n # however, iterators are supposed to implement both __next__ and __iter__\n def __iter__(self):\n return self\n\ns = Sentence(\"hello world. it's sunny outside today\")\nfor i in s:\n print(i)\n" }, { "alpha_fraction": 0.7056074738502502, "alphanum_fraction": 0.7102803587913513, "avg_line_length": 52.5, "blob_id": "a6c30642681eba819860180b7db9536f12d03c1a", "content_id": "52c139f576fd6a6e9fc61e5872d592d8fc7d6501", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 214, "license_type": "no_license", "max_line_length": 88, "num_lines": 4, "path": "/chap5/anonymous_functions.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "fruits = ['strawberry', 'fig', 'apple', 'cherry', 'raspberry', 'banana']\n\n# compare this to higher_order_functions.py where you had to define the function reverse\nprint(sorted(fruits, key=lambda word: word[::-1]))\n" }, { "alpha_fraction": 0.6793003082275391, "alphanum_fraction": 0.728863000869751, "avg_line_length": 19.176469802856445, "blob_id": "9936877249040a998447979c2de9b40e22f3102b", "content_id": "6380db11306ab54f5af506526200005c93ca3233", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 343, "license_type": "no_license", "max_line_length": 103, "num_lines": 17, "path": "/chap2/deques.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "# inserting or removing items from the left of a list is costly because the entire list must be shifted\n# introducing deque:\n\nfrom collections import deque\n\ndq = deque(range(10), maxlen=10)\nprint(dq)\ndq.rotate(3)\nprint(dq)\ndq.rotate(-4)\nprint(dq)\ndq.appendleft(-1)\nprint(dq)\ndq.extend([11, 22, 33])\nprint(dq)\ndq.extendleft([10, 20])\nprint(dq)\n" }, { "alpha_fraction": 0.6743738055229187, "alphanum_fraction": 0.6743738055229187, "avg_line_length": 21.521739959716797, "blob_id": "5d25bd7d24be590820406ba8f2ca8f21a7fd5a8e", "content_id": "b456580b24f34dbf8bbc6ced553c72f75b881faa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 519, "license_type": "no_license", "max_line_length": 115, "num_lines": 23, "path": "/chap14/sentence_gen2.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "import re\nimport reprlib\n\n# lazy implementation\n# This uses less memory. re.match loads all matches into a list. re.finditer only produces the next match on demand\n\nRE_WORD = re.compile('\\w+')\n\nclass Sentence:\n\n def __init__(self, text):\n self.text = text\n\n def __repr__(self):\n return 'Sentence({})'.format(reprlib.repr(self.text))\n\n def __iter__(self):\n for match in RE_WORD.finditer(self.text):\n yield match.group()\n\ns = Sentence(\"Hello world. 
Just another day in paradise\")\nfor i in s:\n print(i)\n\n" }, { "alpha_fraction": 0.6764112710952759, "alphanum_fraction": 0.71875, "avg_line_length": 35.74074172973633, "blob_id": "56227843b67f9aebf8e0c2b4c1ae75841b23c2ea", "content_id": "3e71ec82bd1f033958999aec51e92be4ac1635de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1989, "license_type": "no_license", "max_line_length": 116, "num_lines": 54, "path": "/chap4/codecs.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "for codec in ['latin_1', 'utf_8', 'utf_16']:\n print(codec, 'El Niño'.encode(codec), sep='\\t')\n\nprint('\\n')\n# error handling\ncity = 'São Paulo'\nprint(city.encode('utf-8'))\nprint(city.encode('utf-16'))\nprint(city.encode('iso8859_1'))\nprint()\n\n# the following would error out:\n# print(city.encode('cp437'))\n\n# Traceback (most recent call last):\n# File \"/Users/justinmassey/Documents/code/fluent-python/chap4/codecs.py\", line 12, in <module>\n# print(city.encode('cp437'))\n# File \"/Users/justinmassey/virtualenvs/fluent-python/bin/../lib/python3.6/encodings/cp437.py\", line 12, in encode\n# return codecs.charmap_encode(input,errors,encoding_map)\n# UnicodeEncodeError: 'charmap' codec can't encode character '\\xe3' in position 1: character maps to <undefined>\n\n# the following handles the error\nprint(city.encode('cp437', errors='ignore'))\nprint(city.encode('cp437', errors='replace'))\nprint(city.encode('cp437', errors='xmlcharrefreplace'))\nprint()\n\n# the following deals with UnicodeDecodeError\noctets = b'Montr\\xe9al'\nprint(octets.decode('cp1252'))\nprint(octets.decode('iso8859_7'))\nprint(octets.decode('koi8_r'))\n\n# the following would error out\n# print(octets.decode('utf_8'))\n#\n# Traceback (most recent call last):\n# File \"/Users/justinmassey/Documents/code/fluent-python/chap4/codecs.py\", line 35, in <module>\n# print(octets.decode('utf_8'))\n# UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe9 in position 5: invalid continuation byte\n\nprint(octets.decode('utf-8', errors='replace'))\nprint()\n\n# BOM: Byte Order Mark\n# BOMs are needed to specify if the CPU is using little endian where the LSB comes first ex: [0,1] vs [1,0]\nu16 = 'El Niño'.encode('utf_16')\nprint(u16)\n# the \"xffxfe\" denotes the BOM stating that it is using little ending byte ordering\n# the following is an example of the bytes in LE vs BE (see the abbreviations after utf_16\nu16le = 'El Niño'.encode('utf_16le')\nprint(list(u16le))\nu16be = 'El Niño'.encode('utf_16be')\nprint(list(u16be))\n" }, { "alpha_fraction": 0.6557376980781555, "alphanum_fraction": 0.6688524484634399, "avg_line_length": 24.41666603088379, "blob_id": "d6945b6cf710e978a8d5f1fe9303df7f6109ea14", "content_id": "32c448984561b999aab4a1206288ecc36ce2ae1d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 915, "license_type": "no_license", "max_line_length": 82, "num_lines": 36, "path": "/chap3/missingkeys.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "from collections import defaultdict\n\n# slow way in handling dicts with missing keys\ndict1 = {}\n\ncountries = ['us', 'cn', 'in']\nvalues = [\"value\", \"value2\"]\n\nfor country in countries:\n for value in values:\n # one search for country\n if country not in dict1:\n # possible third search for country, if it doesn't exist to add []\n dict1[country] = []\n # second search for country to append value\n 
dict1[country].append(value)\nprint(dict1)\n\n\n\n# better way to handle dicts with missing keys\ndict2 = {}\n\nfor country in countries:\n for value in values:\n # only one search for country\n dict2.setdefault(country, []).append(value)\nprint(dict2)\n\n\n# another way to handle dicts with missing keys - use defaultdict from collections\ndict3 = defaultdict(list)\nfor country in countries:\n for value in values:\n dict3[country].append(value)\nprint(dict2)\n" }, { "alpha_fraction": 0.6967741847038269, "alphanum_fraction": 0.7241935729980469, "avg_line_length": 31.63157844543457, "blob_id": "14364b71cc7bb27a6f8ae1ee9cd70d5c7dc8e468", "content_id": "a5273b84f5cf2b55f4dced9178d687fb8e95d175", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 620, "license_type": "no_license", "max_line_length": 86, "num_lines": 19, "path": "/chap2/arrays.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "# if only storing numbers, use an array. array.array is more efficient than a list\n\nfrom array import array\nfrom random import random\n\nfloats = array('d', (random() for i in range(10**7)))\nprint(floats[-1])\nfp = open('floats.bin', 'wb')\n# saving using tofile is 7 times faster than writing one float per line in a text file\nfloats.tofile(fp)\nfp.close()\nfloats2 = array('d')\nfp = open('floats.bin', 'rb')\n# using fromfile is 60 times faster than reading numbers from a text file\nfloats2.fromfile(fp, 10**7)\nfp.close()\nprint(floats2[-1])\nassert floats == floats2\n# this example also saves over 50% disk space for the file\n" }, { "alpha_fraction": 0.7089552283287048, "alphanum_fraction": 0.7238805890083313, "avg_line_length": 15.75, "blob_id": "ba984f7f4a4307153bcda7141160e6c12eea577b", "content_id": "374d9eef291c8ccc3b84a494489a8231eb3acf94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 134, "license_type": "no_license", "max_line_length": 34, "num_lines": 8, "path": "/chap14/itertools_count.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "import itertools\n\ngen = itertools.count(1, .5)\nprint(next(gen))\nprint(next(gen))\nprint(next(gen))\n\n# the itertools, count never stops\n" }, { "alpha_fraction": 0.6612411141395569, "alphanum_fraction": 0.6856561303138733, "avg_line_length": 28.787878036499023, "blob_id": "264a88afbab3e3b61d6099e8c857bb25ad59585b", "content_id": "3bf0db3cb2f8317b4b7a9f84a3b4cbbcced18830", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 983, "license_type": "no_license", "max_line_length": 66, "num_lines": 33, "path": "/chap14/gen_merge_examples.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "import itertools\n\nprint('chain')\n# chain(it1, ... 
itN)\nprint(list(itertools.chain('ABC', range(2))))\nprint(list(itertools.chain(enumerate('ABC'))))\n\nprint('chain.from_iterable')\n# chain.from_iterable(it)\nprint(list(itertools.chain.from_iterable((enumerate('ABC')))))\n\nprint('zip')\n# zip(it1, ..., itN)\nprint(list(zip('ABC', range(5))))\nprint(list(zip('ABC', range(5), [10, 20, 30, 40])))\n\nprint('zip_longest')\n# zip_longest(it1, ..., itN, fillvalue=None)\nprint(list(itertools.zip_longest('ABC', range(5))))\nprint(list(itertools.zip_longest('ABC', range(5), fillvalue='?')))\n\nprint('product')\n# product(it1, ..., itN, repeat=1)\nprint(list(itertools.product('ABC', range(2))))\n\nsuits = 'spades hearts diamonds clubs'.split()\nprint(list(itertools.product('AK',suits)))\nprint(list(itertools.product('ABC')))\nprint(list(itertools.product('ABC', repeat=2)))\nprint(list(itertools.product(range(2), repeat=3)))\n\nrows = itertools.product('AB', range(2), repeat=2)\nfor row in rows: print(row)\n" }, { "alpha_fraction": 0.5179211497306824, "alphanum_fraction": 0.6263440847396851, "avg_line_length": 22.25, "blob_id": "389b8e856a114c19621b5ce6301eace7b4bc6b17", "content_id": "f6662ac1d8de2f31e8e4cab805fe8b3e2abc1684", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1116, "license_type": "no_license", "max_line_length": 71, "num_lines": 48, "path": "/chap2/tuples.py", "repo_name": "Th3R3p0/fluent-python", "src_encoding": "UTF-8", "text": "from collections import namedtuple\n\na = (2, 4)\nprint(a)\n\n\n# tuple unpacking\nb, c = a\nprint(b)\nprint(c)\n\n# assign rest of values in tuple to a tuple\nd, e, *rest = range(5)\nprint(d, e, rest)\n\n# unpacking nested tuples\nmetro_areas = [\n ('Tokyo', 'JP', 36.393, (35.689722, 139.691667)),\n ('Delhi NCR', 'IN', 21.935, (28.613889, 77.208889)),\n ('Mexico City', 'MX', 20.142, (40.808611, -74.020386))\n]\n\nprint()\nprint('{:15} | {:^9} | {:^9}'.format('', 'lat.', 'long.'))\nfmt = '{:15} | {:9.4f} | {:9.4f}'\nfor name, cc, pop, (lattitude, longitude) in metro_areas:\n if longitude >= 0:\n print(fmt.format(name, lattitude, longitude))\n\nprint()\n\n# named tuples\nCity = namedtuple('City', 'name country population coordinates')\ntokyo = City('Tokyo', 'JP', 36.933, (35.689722, 139.691667))\nprint(tokyo)\nprint(tokyo.population)\nprint()\nprint(City._fields)\n\nprint()\nLatLong = namedtuple('LatLong', 'lat long')\ndelhi_data = ('Delhi NCR', 'IN', 21.935, LatLong(28.613889, 77.208889))\ndelhi = City._make(delhi_data)\nprint(delhi._asdict())\n\nprint()\nfor key, value in delhi._asdict().items():\n print(key + ':', value)\n" } ]
27
ThatRumbu/discord-rumbot
https://github.com/ThatRumbu/discord-rumbot
700e39cee193b9bb7417bf0ddca92b39d95438e9
1d4a1ecbc7df137048d9645ce3f7046c9e600cba
fc36f0355ea527f56f57b17a5e0456ae44117e71
refs/heads/master
2021-05-19T09:08:29.894948
2020-03-31T14:46:58
2020-03-31T14:46:58
251,620,883
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.751240611076355, "alphanum_fraction": 0.7573235034942627, "avg_line_length": 53.8070182800293, "blob_id": "3f9e2a4dfe21ab90d3c7ce399ba2bc4e01483c57", "content_id": "2d84740e6f0bd87ae80bc3ef2eeb81b43316bf98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 6253, "license_type": "no_license", "max_line_length": 612, "num_lines": 114, "path": "/readme.md", "repo_name": "ThatRumbu/discord-rumbot", "src_encoding": "UTF-8", "text": "# Rumbu's Rumbot for Discord\n\nSo I made this bot cause I was bored and in quarantine (thanks coronavirus), if you want to snag any parts of this bot that's fine as long as you don't claim it as yours (duh) and give credit where credit is due. \n\nKeep in mind everything is modular so theoretically you could just fork the whole thing and add your own cogs for an easy plug and play solution, would def appreciate a heads up though in case I fucked something up and didn't end up fixing it.\n\nSpeaking of modularity, the main `rumbot.py` is nothing more than an albeit overcomplicated and poorly designed cogs management system that will have no problem using third-party cogs. Just remember if you're using this you'll need to throw your bot's token in the `.env`. Anywho, with no futher ado I shall present to you my undeniably uneccessary documentation.\n\n## Requirements\n\nSo you should be fine once you install `discord.py` and `python-dotenv` (tbh just put the token in the main file, this is overly fancy for no reason) but here's my `pip freeze` anyway. It may or may not be updated to the latest version of the bot, just check the modification dates I guess lol.\n```\naiohttp==3.6.2\nasync-timeout==3.0.1\nattrs==19.3.0\nchardet==3.0.4\ndiscord.py==1.3.2\nidna==2.9\nmultidict==4.7.5\npython-dotenv==0.12.0\nwebsockets==8.1\nyarl==1.4.2\n```\n\n## Usage\n\nThe layout's kinda wack already so I'll just chuck the basic commands here I s'pose, it's just cog management shit. The default command prefix is `..` so I'll be using that here, but it's easy asf to change so no stress if you think it's weird. I also put in way more aliases than I needed to so I'm not gonna put them in here but if it would make sense it'll probably work.\n\n### cogs\n\n`..cogs`\n\nThis'll just print a spicy looking list of loaded and unloaded cogs. Throw in whatever arguments you want cause they'll do *absolutely nothing*.\n\n`..cogs load <extension> [*extension]`\n\nGuess what? This one loads cogs :astonishment:\n\nYou can load more than one at once, and there's no need to worry about typos cause I *always* think of everything. Only a monster wouldn't use commas to separate them but hey, it'll work without 'em.\n\n`..cogs unload <extension> [*extension]`\n\nIf you thought that last command was cool, check this bitch out! Same syntax so feel free to throw in more than one at once and it'll get those cogs working like a treat.\n\n`..cogs reload <extension> [*extension]`\n\nDunno really what to say here, just more of the same stuff. Obviously unloaded cogs won't be reloaded, but tpyo consideration certainly isn't unique to this command. (lmao see what i did there?)\n\n## Essentials\n\nSo this cog, *in my honest opinion* should not be removed unless you're dealing with all this shit already because—as the name suggests—I consider it to be essential to the functioning of a discord bot (technically it's not but they're all pretty standard features /shrug). 
Aside from error handling and `on_ready` confirmation among other useful behind-the-scenes functions, it has a command or two that you may find to be useful. Keep in mind that ease of use commands and other 'essentials' are in the [Base](https://github.com/ThatRumbu/discord-rumbot#Base) extension. I also consider that to be a necessity.\n\n(it's a work in progress ok, there will be more than one command)\n\n### stop\n\n`..stop`\n\nYeah this doesn't take any arguments either, it's literally just stops the bot. No there's no start command cause that's not how this works.\n\nI'mma fill out some space by letting you know that `..kill`, `..die` and `..exit` are all valid aliases. Yes, I know I said I wasn't going to list aliases. No, I don't care that this is blatant hypocrisy.\n\n## Base\n\noops, this be empty. get to work, bud\n\n## Help\n\na more detailed (and better looking) `..help` command. might do some other stuff, idk, I haven't written it yet\n\n## Admin\n\nooh the useful shit! welp guess what, this is really low on my to do list so if you want it bug me\n\n## Projects\n\ncreate and manage channels for private discussion\n\n## Overviewer\n\nallows for integration with [Minecraft Overviewer](https://github.com/overviewer/Minecraft-Overviewer/); I also reckon it's super neat or something, you should check it out\n\n## Dev\n\nthis has got a couple nonessential (crazy, right?) commands that I was using mostly during development, but if you feel like enabling it to do some testing feel free. I'll write some documentation on it at some point, can't be bothered rn even though I just wrote this entire document\n\n## FAQs\n\nNo this isn't a cog you smartass, it's literally just a collection of frequently (not really, if I'm being honest they were arbitrarily decided by me with no purpose other than mentioning a couple of things I wanted to, and further expanding the comedic content of what could have been *extremely* boring documentation (ooh double parentheses—just felt like pointing out that the documentation itself was completely uneccesary, primarily because nobody, not one single person is going to read this. \\*sigh\\*)) asked questions. Here we go.\n\n### Why do you keep referring to your bot as 'Rumbot`? Is this a subconscious reference to your alcohol addiction?\n\nNo, what the fuck. My name is Rumbu so I made a (possibly poor?) pun. I shouldn't have to be explaining this.\n\n### So it's just humour that you use to mask your depression and resultant alcoholism?\n\n1. How is this relevant to the bot?\n2. At what point did I infer that I even drink alcohol?\n3. Alcoholism isn't a joke, please take it seriously\n4. That question doesn't even make sense\n\n### Can I use your bot?\n\nFinally a real question! Yes (I did mention this at the start but ok) you are more than welcome to take parts of Rumbot or snag the whole thing if you'd like, all I ask is you give me fair credit.\n\n### It's not working. Fix your shitty ass-bot.\n\nWell firstly, that's not even a question. Secondly, have you made sure you're using the right syntax? `..help` is your friend.\n\nIf your problem still persists feel free to ask me, just send me a message on discord (@Rumbu#5277)\n\n### Do you cry youself to sleep because nobody finds you funny?\n\nI'm actually done with this shit. No more questions. 
<sub><sub>Sometimes if I'm drunk and depressed</sub></sub>" }, { "alpha_fraction": 0.6404391527175903, "alphanum_fraction": 0.64226895570755, "avg_line_length": 33.15625, "blob_id": "1de652fcd56388fa0bb1d6d8e4d442371dd479e0", "content_id": "cf44374e957e99b0f8657c9386e6aa7cd5ecef21", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1093, "license_type": "no_license", "max_line_length": 103, "num_lines": 32, "path": "/cogs/Essentials.py", "repo_name": "ThatRumbu/discord-rumbot", "src_encoding": "UTF-8", "text": "import discord\nfrom discord.ext import commands\nfrom time import strftime\n\nclass Essentials(commands.Cog):\n\n def __init__(self, client):\n self.client = client\n\n # Initial startup confirmation\n @commands.Cog.listener()\n async def on_ready(self):\n print(strftime('\\n%b %d, %Y'))\n print('Connected to %s servers\\n' % (str(len(self.client.guilds))))\n print(strftime('%X ') + 'Logged in as {0.user}'.format(self.client))\n\n # Error reporting\n @commands.Cog.listener()\n async def on_command_error(self, ctx, error):\n report = discord.Embed(colour=0xffffff)\n report.add_field(name='Command invoked incorrectly, run ..help for correct usage', value=error)\n await ctx.send(embed=report)\n\n # Kill command\n @commands.command(name='stop', aliases=['kill', 'exit', 'die'])\n async def kill_bot_command(self, ctx):\n print(strftime('%X ') + 'User %s has killed Rumbot' % (ctx.author))\n await ctx.channel.send('ouch')\n await self.client.logout()\n\ndef setup(client):\n client.add_cog(Essentials(client))\n" }, { "alpha_fraction": 0.6102383136749268, "alphanum_fraction": 0.6176522374153137, "avg_line_length": 40.64706039428711, "blob_id": "38810a225d5bd05b94d75a7d9d0cb55bb4dcbfd5", "content_id": "a37c2aa88d7159951632a829993941092395de9c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5669, "license_type": "no_license", "max_line_length": 125, "num_lines": 136, "path": "/Rumbot.py", "repo_name": "ThatRumbu/discord-rumbot", "src_encoding": "UTF-8", "text": "# Rumbot by Rumbu (@Rumbu#5277)\n# Discord developer portal - https://discordapp.com/developers/applications/\n# Add Rumbot to your server - https://discordapp.com/oauth2/authorize?client_id=617989837500448788&scope=bot\n\nimport os\nimport discord\nfrom dotenv import load_dotenv\nfrom discord.ext import commands\nfrom time import strftime\n\nload_dotenv()\ncmd_prefix = '..'\nclient = commands.Bot(command_prefix=cmd_prefix)\n\n# Default cogs\nloaded_cogs = ['cogs.Essentials']\n\nif __name__ == '__main__':\n for ext in loaded_cogs:\n client.load_extension(ext)\n\n# Cog management group\[email protected](name='cogs', aliases=['cog', 'modules', 'ext', 'exts', 'extension', 'extensions'], invoke_without_command=True)\nasync def cog_mgmt(ctx):\n \n # List cogs\n unloaded_cogs = []\n for ext in os.listdir('./cogs'):\n if ext.endswith('.py') and 'cogs.' 
+ ext[:-3] not in loaded_cogs:\n unloaded_cogs.append(ext[:-3])\n unloaded_cogs.sort()\n\n loaded_list = ''\n unloaded_list = ''\n if loaded_cogs == []:\n loaded_list = '—'\n if unloaded_cogs == []:\n unloaded_list = '—'\n\n for ext in loaded_cogs:\n loaded_list = loaded_list + str(ext)[5:] + '\\n'\n for ext in unloaded_cogs:\n unloaded_list = unloaded_list + (ext + '\\n')\n\n list_cogs = discord.Embed(title='Available Cogs', colour=0xfffffe)\n list_cogs.add_field(name='Loaded Cogs', value=loaded_list)\n list_cogs.add_field(name='Unloaded Cogs', value=unloaded_list)\n list_cogs.set_footer(text='Use %shelp to view correct usage and syntax for cogs' % cmd_prefix)\n await ctx.send(embed=list_cogs)\n\n# Load cogs\n@cog_mgmt.command(name='load', aliases=['enable', 'start', 'add'])\nasync def cog_mgmt_load(ctx, *extension):\n\n # Generate list\n unloaded_cogs = []\n for ext in os.listdir('./cogs'):\n ext = 'cogs.' + ext\n if ext.endswith('.py') and ext[:-3] not in loaded_cogs:\n unloaded_cogs.append(ext[:-3])\n \n # Validity check + load\n for ext in extension:\n ext = 'cogs.' + ext.replace(',', '')\n if ext not in loaded_cogs and ext not in unloaded_cogs:\n cog_load_info = discord.Embed(title=ext + ' does not exist', colour=0xffffff)\n cog_load_info.set_footer(text='Ensure capitalisation is correct, use %shelp for correct usage' % cmd_prefix)\n await ctx.send(embed=cog_load_info)\n elif ext in loaded_cogs:\n cog_load_info = discord.Embed(title=ext + ' is already loaded', colour=0xffffff)\n cog_load_info.set_footer(text='Use %scogs reload to update' % cmd_prefix)\n await ctx.send(embed=cog_load_info)\n else:\n client.load_extension(ext)\n loaded_cogs.append(ext)\n print('%s %s has been enabled by %s in %s' % (strftime('%X'), ext, ctx.author, ctx.guild))\n cog_load_info = discord.Embed(title=ext + ' has been successfully loaded', colour=0xffffff)\n await ctx.send(embed=cog_load_info)\n\n# Unload cogs\n@cog_mgmt.command(name='unload', aliases=['disable', 'stop', 'remove'])\nasync def cog_mgmt_unload(ctx, *extension):\n\n # Generate list\n unloaded_cogs = []\n for ext in os.listdir('./cogs'):\n ext = 'cogs.' + ext\n if ext.endswith('.py') and ext[:-3] not in loaded_cogs:\n unloaded_cogs.append(ext[:-3])\n \n # Validity check + load\n for ext in extension:\n ext = 'cogs.' + ext.replace(',', '')\n if ext not in loaded_cogs and ext not in unloaded_cogs:\n cog_load_info = discord.Embed(title=ext + ' does not exist', colour=0xffffff)\n cog_load_info.set_footer(text='Ensure capitalisation is correct, use %shelp for correct usage' % cmd_prefix)\n await ctx.send(embed=cog_load_info)\n elif ext in unloaded_cogs:\n cog_load_info = discord.Embed(title=ext + ' is already unloaded', colour=0xffffff)\n await ctx.send(embed=cog_load_info)\n else:\n client.unload_extension(ext)\n loaded_cogs.remove(ext)\n print('%s %s has been disabled by %s in %s' % (strftime('%X'), ext, ctx.author, ctx.guild))\n cog_load_info = discord.Embed(title=ext + ' has been successfully unloaded', colour=0xffffff)\n await ctx.send(embed=cog_load_info)\n\n# Reload cogs\n@cog_mgmt.command(name='reload', aliases=['update', 'restart'])\nasync def cog_mgmt_reload(ctx, *extension):\n\n # Generate list\n unloaded_cogs = []\n for ext in os.listdir('./cogs'):\n ext = 'cogs.' + ext\n if ext.endswith('.py') and ext[:-3] not in loaded_cogs:\n unloaded_cogs.append(ext[:-3])\n \n # Validity check + load\n for ext in extension:\n ext = 'cogs.' 
+ ext.replace(',', '')\n if ext not in loaded_cogs and ext not in unloaded_cogs:\n cog_load_info = discord.Embed(title=ext + ' does not exist', colour=0xffffff)\n cog_load_info.set_footer(text='Ensure capitalisation is correct, use %shelp for correct usage' % cmd_prefix)\n await ctx.send(embed=cog_load_info)\n elif ext in unloaded_cogs:\n cog_load_info = discord.Embed(title=ext + ' is not loaded', colour=0xffffff)\n cog_load_info.set_footer(text='Use %scogs load to enable' % cmd_prefix)\n await ctx.send(embed=cog_load_info)\n else:\n client.reload_extension(ext)\n print('%s %s has been reloaded by %s in %s' % (strftime('%X'), ext, ctx.author, ctx.guild))\n cog_load_info = discord.Embed(title=ext + ' has been successfully reloaded', colour=0xffffff)\n await ctx.send(embed=cog_load_info)\n\nclient.run(os.getenv('BOT_TOKEN'))\n\n" } ]
3
abosloh/sleep
https://github.com/abosloh/sleep
2f89cbb6352cebaaaa31063d4f187aae49a2a3e3
48f2460ad9ffc3407db06851fe8ba6638c0f78f4
cae428582296e1fd30dbfae6d202484e4af0721e
refs/heads/master
2021-01-22T07:39:04.253256
2013-11-04T12:10:50
2013-11-04T12:10:50
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6933333277702332, "alphanum_fraction": 0.7200000286102295, "avg_line_length": 12.636363983154297, "blob_id": "e1d3558eff3c80001563ba6f08950c85ee8c9530", "content_id": "504de246b93cd0d88f44aef439bbf2df589942a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 150, "license_type": "no_license", "max_line_length": 41, "num_lines": 11, "path": "/README.md", "repo_name": "abosloh/sleep", "src_encoding": "UTF-8", "text": "sleep\n=====\n\nwait seconds\n\nfunction sleep just do wait some seconds \n\n\nsleep(5) # wait 5 seconds and continue\n\nsleep(1) # wait 1 seconds and continue\n" }, { "alpha_fraction": 0.5815384387969971, "alphanum_fraction": 0.5938461422920227, "avg_line_length": 25.25, "blob_id": "65e93d803fd6f6b29cf5078ddb955a236daff9e9", "content_id": "1ca3a65dda001bb81f85d5218d13fcef367dc0ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 325, "license_type": "no_license", "max_line_length": 70, "num_lines": 12, "path": "/sleep.py", "repo_name": "abosloh/sleep", "src_encoding": "UTF-8", "text": "\n# name : abosloh\n# email : [email protected]\n\n# sleep the programe 1,2,3,... seconds\ndef sleep(sec):\n \n t = int(time()) # get initial time as seconds\n # when the real time greater than initial time + parameter seconds\n # break while and exit from function\n while True:\n if time()>t+1:\n break\n \n" } ]
2
Pawel-9215/myStruggles
https://github.com/Pawel-9215/myStruggles
f24efefb10ae6e478bebf036423e7256ef738597
ee6b66b3759b7d0941348862282644eb547a3722
1670a3e47e176c22cf1ec5a52d7b31206993a91f
refs/heads/master
2020-12-15T22:21:23.973340
2020-03-16T19:31:08
2020-03-16T19:31:08
235,271,905
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.5547526478767395, "alphanum_fraction": 0.5775430798530579, "avg_line_length": 23.97222137451172, "blob_id": "5f99edc3362b569b170c4545cc3f7d3491487517", "content_id": "480be8464aae5e7a56bd1fc624d1408ec25001cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1799, "license_type": "no_license", "max_line_length": 102, "num_lines": 72, "path": "/game/version1/fx.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "import pyglet\nimport random\nfrom random import randint\n\nclass ParticleFX():\n\n #x_pos = 0\n #y_pos = 0\n #life = 2\n\n def __init__(self, x, y, life=4, count=40, speed=180):\n self.x_pos = x\n self.y_pos = y\n self.life = life\n self.dead = False\n self.count = count\n self.speed = speed\n pyglet.clock.schedule_once(self.die, self.life)\n self.particles = self.generate_particles()\n self.vectors = self.generate_vectors()\n self.clock = 1\n self.particle_color = [185,197,220]\n\n def generate_particles(self):\n particles = []\n for i in range(self.count):\n particles.append([self.x_pos, self.y_pos])\n return particles\n\n def generate_vectors(self):\n vectors = []\n for i in range(self.count):\n vectors.append([random.random(), random.random()])\n return vectors\n\n\n\n def die(self, dt):\n self.dead = True\n\n def delete(self):\n del self\n\n def update(self, dt):\n self.clock += 1\n if self.speed >= 5:\n self.speed -= 1\n for i in range(self.count):\n self.particles[i][0] += self.vectors[i][0]*randint(self.speed-3, self.speed+3)*dt\n self.particles[i][1] += self.vectors[i][1]*randint(self.speed-3, self.speed+3)*dt\n\n if min(self.particle_color) > 2:\n self.particle_color[0] -= 1\n self.particle_color[1] -= 1\n #self.particle_color[2] -= 1\n\n #self.draw()\n\n \n\n def draw(self):\n coords = []\n colors = []\n for particle in self.particles:\n coords.append(int(particle[0]))\n coords.append(int(particle[1]))\n colors.extend(self.particle_color)\n #print(coords)\n\n \n pyglet.graphics.draw(len(self.particles), pyglet.gl.GL_POINTS, ('v2i', coords), ('c3B', colors))\n pass\n\n" }, { "alpha_fraction": 0.5855161547660828, "alphanum_fraction": 0.6086286306381226, "avg_line_length": 18.696969985961914, "blob_id": "a17debb0116ab02973ad4203c536a49c203bcec1", "content_id": "ab367b9b6791045f157e315e3c41d023a4ffee5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 649, "license_type": "no_license", "max_line_length": 157, "num_lines": 33, "path": "/piggy.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "import pyglet\nfrom pyglet.window import mouse\n\nwindow = pyglet.window.Window()\n\nlabel = pyglet.text.Label(\"Hello, world!!!!!\", font_name=\"Arial\", font_size = 16, x=window.width//2, y=window.height//2, anchor_x=\"center\",anchor_y=\"center\")\nw = 12\[email protected]\ndef on_draw():\n global w\n\n window.clear()\n label.draw()\n some_points(w)\n \n\ndef some_points(ab):\n\n pyglet.graphics.draw(2, pyglet.gl.GL_POINTS,\n ('v2i', (10, ab, 30, 35))\n )\n\n\[email protected]\ndef on_mouse_press(x, y, button, modifiers):\n global w\n if button == mouse.LEFT:\n print(\"x = \", x, \"y = \", y)\n label.x = x\n label.y = y\n w += 2\n\npyglet.app.run()" }, { "alpha_fraction": 0.6498599648475647, "alphanum_fraction": 0.6554622054100037, "avg_line_length": 26.538461685180664, "blob_id": "ddf61007d02f60772290b357e9dc5bd26ecbced6", "content_id": "1f03926082d2149d8c141f0db746125520e0d938", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 357, "license_type": "no_license", "max_line_length": 75, "num_lines": 13, "path": "/game/version1/bullet.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "import pyglet \nimport resources \nimport physicalobject\n\nclass Bullet(physicalobject.PhysicalObject):\n # bah bah\n def __init__(self, *args, **kwargs):\n super(Bullet, self).__init__(resources.bullet_image, *args, **kwargs)\n pyglet.clock.schedule_once(self.die, 1.0)\n self.is_bullet = True\n \n def die(self, dt):\n self.dead = True" }, { "alpha_fraction": 0.6646655201911926, "alphanum_fraction": 0.6946826577186584, "avg_line_length": 33.29411697387695, "blob_id": "0c723e6c687ac8dda268248c8838016eae434d24", "content_id": "04d09ae52f498202832d2252b91b686504d01b39", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1166, "license_type": "no_license", "max_line_length": 99, "num_lines": 34, "path": "/game/version1/load.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "import pyglet\nimport random\nfrom random import randint\nimport math\nimport resources\nimport physicalobject\nimport util\nimport asteroid\n\ndef asteroids(num_asteroids, player_position, batch=None):\n asteroids = []\n #asteroid_types = [resources.asteroid_big_image, resources.asteroid_small_image]\n for i in range(num_asteroids):\n asteroid_x, asteroid_y = player_position\n while util.distance((asteroid_x, asteroid_y), player_position) < 100:\n asteroid_x = randint(0,800)\n asteroid_y = randint(0,800)\n #asteroid_size = randint(0,1)\n new_asteroid = asteroid.Asteroid(x = asteroid_x, y = asteroid_y, batch=batch)\n new_asteroid.rotation = randint(0,360)\n new_asteroid.velocity_x = random.random()*40*randint(-1, 1)\n new_asteroid.velocity_y = random.random()*40*randint(-1, 1)\n asteroids.append(new_asteroid)\n return asteroids\n\n\n\ndef player_lives(num_lives, batch=None):\n player_lives = []\n for i in range(num_lives):\n new_sprite = pyglet.sprite.Sprite(img=resources.player_image, x=785-i*30, y=785, batch=batch)\n new_sprite.scale = 0.5\n player_lives.append(new_sprite)\n return player_lives\n" }, { "alpha_fraction": 0.4628966152667999, "alphanum_fraction": 0.4828556776046753, "avg_line_length": 26.521127700805664, "blob_id": "f31c5aa3e0bca31154fd40f1a8b3e97f665a5d8a", "content_id": "e49810a87d0f0ac79f9938de4f919d30ec7c0c16", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3908, "license_type": "no_license", "max_line_length": 240, "num_lines": 142, "path": "/Snake_with_classes.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "import curses\nimport random\nfrom curses import wrapper\nfrom random import randint\n\nWIDTH = 35\nHEIGHT = 20\nMAX_X = WIDTH -2\nMAX_Y = HEIGHT -2\nSNAKE_LEN = 4\nSNAKE_X = SNAKE_LEN + 1\nSNAKE_Y = 3\nTimeout = 150\n\n#119 - up w\n#115 - down s\n#97 - left a\n#100 - right d\n\nUP_KEY = 119\nDOWN_KEY = 115\nLEFT_KEY = 97\nRIGHT_KEY = 100\n\n\n\n\n\nif __name__ == \"__main__\":\n \n #let's try to use wrapper because this book is too fucking dumb to care\n def main(screen):\n\n class Body(object):\n def __init__(self, x, y, char=\"0\"):\n self.x = x\n self.y = y\n self.char = char\n def coor(self):\n return self.x, self.y\n\n class Snake:\n REVERSE_MAP = {UP_KEY:DOWN_KEY, DOWN_KEY:UP_KEY, LEFT_KEY:RIGHT_KEY, RIGHT_KEY:LEFT_KEY}\n\n def 
__init__(self, x, y, window):\n self.body_list = []\n self.timeout = Timeout\n for i in range(SNAKE_LEN, 0, -1):\n self.body_list.append(Body(x - i, y))\n\n self.body_list.append(Body(x, y, \"@\"))\n self.window = window\n self.direction = RIGHT_KEY\n self.last_head_coor = (x, y)\n self.direction_map = {\n UP_KEY : self.move_up,\n DOWN_KEY : self.move_down,\n LEFT_KEY : self.move_left,\n RIGHT_KEY : self.move_right\n }\n\n def add_body(self, body_list):\n self.body_list.extend(body_list)\n\n def render(self):\n for body in self.body_list:\n self.window.addstr(body.y, body.x, body.char)\n\n @property\n def head(self):\n return self.body_list[-1]\n\n @property\n def coor(self):\n return self.head.x, self.head.y\n\n def update(self):\n last_body = self.body_list.pop(0)\n last_body.x = self.body_list[-1].x\n last_body.y = self.body_list[-1].y\n self.body_list.insert(-1, last_body)\n self.last_head_coor = (self.head.x, self.head.y)\n self.direction_map[self.direction]()\n\n\n def move_up(self):\n self.head.y -= 1\n if self.head.y < 1:\n self.head.y = MAX_Y\n\n def move_down(self):\n self.head.y += 1\n if self.head.y > MAX_Y:\n self.head.y = 1\n\n def move_left(self):\n self.head.x -= 1\n if self.head.x < 1:\n self.head.x = MAX_X\n\n def move_right(self):\n self.head.y += 1\n if self.head.y > MAX_X:\n self.head.y = 1\n\n def change_direction(self, direction):\n if direction != Snake.REVERSE_MAP[self.direction]:\n self.direction = direction\n\n window = curses.newwin(HEIGHT, WIDTH, 0, 0)\n window.timeout(Timeout)\n window.keypad(1)\n curses.noecho()\n curses.curs_set(0)\n window.border(0)\n\n\n snake1 = Snake(SNAKE_X, SNAKE_Y, window)\n\n\n window.border(0)\n\n snake1.render()\n while True:\n event = window.getch()\n if event == 27:\n break\n\n if event in [UP_KEY, DOWN_KEY, LEFT_KEY, RIGHT_KEY]:\n snake1.change_direction(event)\n\n if event == 32:\n key = -1\n while key != 32:\n key = window.getch()\n\n snake1.update()\n\n wrapper(main)\n\n\n# FUCK THIS SHIT THIS book: https://www.amazon.com/Learning-Python-Building-Games-programming/dp/1789802989/ref=cm_cr_srp_d_product_top?ie=UTF8 is money grabbing scam not worth your time. Goddammit. 
Let's draw some dicks with turtle instead\n" }, { "alpha_fraction": 0.39030376076698303, "alphanum_fraction": 0.4409328103065491, "avg_line_length": 30.346153259277344, "blob_id": "25b7049b06fb0a051ec8367c9e164edc77e693e7", "content_id": "6f8e765b706160cb5c59a04638e143e05e74d31f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3259, "license_type": "no_license", "max_line_length": 188, "num_lines": 104, "path": "/main_1.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "#here we go with the bloody snake\n\nimport curses\nimport random\n\nfrom random import randint\nfrom curses import wrapper\nfrom curses import KEY_DOWN, KEY_UP, KEY_LEFT, KEY_RIGHT\n\ndef main(screen):\n \n #function calls\n win_topleft_corner = (0, 0)\n win_bottomright_corner = (20, 60) \n win = curses.newwin(win_bottomright_corner[0], win_bottomright_corner[1], win_topleft_corner[0], win_topleft_corner[1])\n win.border(0)\n win.keypad(1)\n\n curses.curs_set(0)\n curses.cbreak()\n curses.noecho()\n \n\n #variables\n snake = [[6,8], [6,7], [6,6]]\n food = [10,20]\n snake_speed = 150\n score = 0\n snake_body = \"O\"\n\n#119 - up w\n#115 - down s\n#97 - left a\n#100 - right d\n\n key = 100 #by default\n opposite_key = 97\n\n while key != 27:\n \n win.timeout(snake_speed)\n default_key = key\n event = win.getch()\n \n win.clear()\n win.border(0)\n key = key if event == -1 else event \n if event not in [119, 115, 97, 100, 27] or event in [opposite_key]:\n key = default_key \n \n opposite_key = {119:115, 115:119, 97:100, 100:97}.get(key)\n\n \n snake.insert(0, [snake[0][0] + (key == 115 and 1) + (key == 119 and -1), snake[0][1] + (key == 97 and -1) + (key == 100 and 1)])\n \n\n if snake[0] == food:\n snake_speed -= 1\n food = []\n score += 1\n while food == []:\n food = [randint(win_topleft_corner[0]+1, win_bottomright_corner[0]-2), randint(win_topleft_corner[1]+1, win_bottomright_corner[1]-2)]\n else:\n snake.pop(-1)\n\n #snake killing\n if snake[0] in snake[1:]:\n #snake is dead\n snake_body = \"X\"\n key = 27\n\n if snake[0][0] <= win_topleft_corner[0] or snake[0][0] >= win_bottomright_corner[0]-1 or snake[0][1] <= win_topleft_corner[1] or snake[0][1] >= win_bottomright_corner[1]-1:\n #snek is also dead\n snake_body = \"X\"\n key = 27\n \n #Draw snake head:\n win.addch(snake[0][0], snake[0][1], \"@\")\n #draw snake body:\n\n for seg in snake[1:]:\n win.addch(seg[0], seg[1], snake_body)\n\n #draw food\n win.addch(food[0], food[1], \"%\")\n\n win.addstr(0, 3, \"\".join([\" key = \", str(key), \" \"]))\n win.addstr(0, 16, \"\".join([\" event = \", str(event), \" \"]))\n win.addstr(win_bottomright_corner[0]-1, win_topleft_corner[1]+4, \"\".join([\" score: \", str(score), \" \"]))\n\n win.addstr(win_bottomright_corner[0]-1, win_topleft_corner[1]+20, \"\".join([\" speed: \", str(151 - snake_speed), \" \"]))\n\n curses.cbreak(1)\n win.timeout(2000)\n test = win.getch()\n\n \n \n\n\n\n\n\nwrapper(main)" }, { "alpha_fraction": 0.8311688303947449, "alphanum_fraction": 0.8311688303947449, "avg_line_length": 37.5, "blob_id": "b607302334ae9d539b6cce9d0ca8c42c0850bf1a", "content_id": "9afc4ac7e7650412bfc856f86f99c5d1676fcc38", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 77, "license_type": "no_license", "max_line_length": 62, "num_lines": 2, "path": "/README.md", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "# myStruggles\nthis is playground for my Python testing 
and learning projects\n" }, { "alpha_fraction": 0.5957446694374084, "alphanum_fraction": 0.6106383204460144, "avg_line_length": 15.103447914123535, "blob_id": "f3285a58bbb394c08dfbf88959d07c69ca86d6ca", "content_id": "6d07a44127205d0ca4fce7ccc3a813cb1c781c4c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 470, "license_type": "no_license", "max_line_length": 60, "num_lines": 29, "path": "/santa_1.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "\n\ninput_data = open(\"input.txt\")\nmass = []\nall_the_mass = 0\n\nfor line in input_data:\n mass.append(int(line))\n\n#print(mass)\n\n\n\ndef calculate_fuel(mass):\n\n total_fuel = int(mass/3) - 2\n \n if total_fuel <= 0:\n return 0\n else:\n total_fuel = total_fuel + calculate_fuel(total_fuel)\n return total_fuel\n\n\n#print(calculate_fuel(14))\n\nfor i in range(len(mass)):\n\n all_the_mass = all_the_mass + calculate_fuel(mass[i])\n\nprint(all_the_mass)\n\n" }, { "alpha_fraction": 0.4127434194087982, "alphanum_fraction": 0.43202874064445496, "avg_line_length": 28.065933227539062, "blob_id": "67fbafe2a33cdc5520319ece90c212195ea28402", "content_id": "4bacde926b9723043900c6fe1cf4370a1fb34822", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5289, "license_type": "no_license", "max_line_length": 124, "num_lines": 182, "path": "/Curses_anim.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "import math\nfrom math import sqrt\nimport curses\nimport random\nfrom curses import wrapper\nfrom random import randint\n\nUP_KEY = 119\nDOWN_KEY = 115\nLEFT_KEY = 97\nRIGHT_KEY = 100\n\nall_leafs = []\n\n\nif __name__ == \"__main__\":\n \n\n def main(screen):\n \n #screen.timeout(1000)\n #screen function calls\n num_rows, num_cols = screen.getmaxyx()\n min_x = 1\n min_y = 1\n max_x = num_cols-2\n max_y = num_rows-2\n screen.keypad(1)\n curses.noecho()\n curses.curs_set(0)\n screen.border(0)\n screen.timeout(0)\n screen.addstr(0, 2, \"\".join([\"Screen size: \", str(num_rows), \" x \", str(num_cols)]))\n\n #classes\n\n class Spawner():\n x_position = min_x + 2\n y_position = min_y + 2\n\n def __init__(self, y, x):\n self.x_position = x\n self.y_position = y\n\n def self_render(self):\n screen.addstr(self.y_position, self.x_position-1, \"[ ]\")\n\n def move(self, key):\n if key == UP_KEY:\n if self.y_position - 1 < min_y:\n self.y_position = max_y\n else:\n self.y_position -= 1\n if key == DOWN_KEY:\n if self.y_position + 1 > max_y:\n self.y_position = min_y\n else:\n self.y_position += 1\n if key == LEFT_KEY:\n if self.x_position - 1 < min_x:\n self.x_position = max_x\n else:\n self.x_position -= 1\n if key == RIGHT_KEY:\n if self.x_position + 1 > max_x:\n self.x_position = min_x\n else:\n self.x_position += 1\n \n\n def spawn(self):\n Leaf(self.y_position, self.x_position)\n\n class Leaf():\n dire = [-1, 1]\n randomdir = dire[randint(0, 1)]\n y_position = 0\n x_position = 0\n velocity = 1\n speed_counter = 0\n global all_leafs\n\n def __init__(self, y, x):\n self.x_position = x\n self.y_position = y\n all_leafs.append(self)\n\n\n def set_new_position(self):\n \n if find_collision(self.y_position + 1, self.x_position) == False:\n self.y_position += 1\n if self.velocity < 20:\n self.velocity += 1+2/(self.velocity+0.1)\n elif find_collision(self.y_position + 1, self.x_position + self.randomdir) == False:\n self.x_position += self.randomdir\n self.y_position += 1\n if self.velocity < 20:\n 
self.velocity += 1+2/(self.velocity+0.1)\n elif find_collision(self.y_position, self.x_position + self.randomdir) == False and self.velocity > 0 :\n self.x_position += self.randomdir\n if self.velocity > 0:\n self.velocity -= 1\n else:\n self.randomdir = -self.randomdir\n if self.velocity > 0:\n self.velocity -= 1\n \n \n\n def renderself(self):\n screen.addstr(self.y_position, self.x_position, \"O\")\n\n def tick(self):\n self.speed_counter += 1\n if self.speed_counter + self.velocity >= 30:\n \n self.set_new_position()\n \n self.speed_counter = 0\n self.renderself()\n\n def find_collision(y_pos, x_pos):\n for ob in all_leafs:\n if ob.y_position == y_pos and ob.x_position == x_pos:\n return True\n if y_pos >= max_y or y_pos <= min_y:\n return True\n if x_pos >= max_x or x_pos <= min_x:\n return True\n return False\n\n\n tiktak = 1\n spaw = Spawner(min_y+2, min_x+2)\n while True:\n \n \n screen.refresh()\n curses.napms(5)\n\n event = screen.getch()\n if event == 27 or event == 113: #quit\n break\n\n if event in [UP_KEY, DOWN_KEY, LEFT_KEY, RIGHT_KEY]:\n spaw.move(event)\n\n if event == 32: #spacja\n spaw.spawn() \n\n screen.erase()\n if tiktak <= 200:\n Leaf(1, randint(int(min_x+max_x/4), int(max_x/2)))\n\n #Leaf(2+tiktak, 2)\n\n \n\n\n\n for obj in all_leafs:\n obj.tick()\n spaw.self_render()\n\n tiktak += 1\n screen.border(0)\n #screen.addstr(0, 40, str(tiktak))\n screen.addstr(0, 30, str(event))\n screen.addstr(0, 10, str(all_leafs[0].velocity))\n screen.addstr(max_y+1, 5, \"WSAD to move spawner, SPACEBAR to spawn new particles, Q to quit\", curses.A_STANDOUT)\n \n\n \n\n\n\n wrapper(main)\n\n print(\"bye\")\n #print(all_leafs)\n #input(\"\")" }, { "alpha_fraction": 0.5738636255264282, "alphanum_fraction": 0.5965909361839294, "avg_line_length": 23.14285659790039, "blob_id": "0f01f3be66d618707fee8bb7b1bd309c4553463e", "content_id": "88c4c6965d2f4becfb16b795056f2560d98db549", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 352, "license_type": "no_license", "max_line_length": 47, "num_lines": 14, "path": "/nauka.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "\ndef half_in_half(sentence):\n char_list = []\n for i in range(0, int(len(sentence)/2), 2):\n char_list.append(sentence[i])\n print(\"\".join(char_list))\n\n\nlist_inp1 = []\nhow_many = int(input())\n#print(type(how_many))\nfor i in range(1, how_many+1):\n list_inp1.append(input())\nfor ranges in list_inp1:\n half_in_half(ranges)\n\n " }, { "alpha_fraction": 0.7479252815246582, "alphanum_fraction": 0.7531120181083679, "avg_line_length": 29.15625, "blob_id": "53b69b608a58fb838e9ba293ad80a7c07a36d60d", "content_id": "578f5d93eeecb10a1c6802b5680e70544f24c5c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 964, "license_type": "no_license", "max_line_length": 69, "num_lines": 32, "path": "/game/version1/resources.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "import pyglet\n\npyglet.resource.path = ['../resources']\npyglet.resource.reindex()\n\n\nplayer_image = pyglet.resource.image(\"ship_1.png\")\nbullet_image = pyglet.resource.image(\"bullet.png\")\nasteroid_big_image = pyglet.resource.image(\"big_meteor.png\")\nasteroid_small_image = pyglet.resource.image(\"small_meteor.png\")\nbackground_image = pyglet.resource.image(\"background.png\")\nengine_flame = pyglet.resource.image(\"engine_flame.png\")\nmusic_theme = 
pyglet.resource.media(\"muzyki.wav\")\n\nengine_sound = pyglet.resource.media(\"engine.wav\", streaming = False)\n\nshot_sound = pyglet.resource.media(\"shot_1.wav\", streaming=False)\nexpl_sound = pyglet.resource.media(\"expl_1.wav\", streaming=False)\n\n\n\ndef center_image(image):\n #set ancor point to center\n image.anchor_x = image.width // 2\n image.anchor_y = image.height // 2\n\n\ncenter_image(player_image)\ncenter_image(engine_flame)\ncenter_image(bullet_image)\ncenter_image(asteroid_big_image)\ncenter_image(asteroid_small_image)" }, { "alpha_fraction": 0.619178056716919, "alphanum_fraction": 0.619178056716919, "avg_line_length": 18.157894134521484, "blob_id": "0e8a67a778d2a8bf03c738ec3def1da570823596", "content_id": "205d8c57f019ec9687a03a5a18bf6280a9b8750f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 365, "license_type": "no_license", "max_line_length": 48, "num_lines": 19, "path": "/classes_t1.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "# learnig about classes in OOP\n\nclass Bird:\n def about():\n print(\"Species: Bird\")\n def Dance(self):\n print(\"Not all birds dance but some do\")\n\nclass peaCock(Bird):\n def Dance(self):\n print(\"Peacock can dance\")\n\nclass Sparrow(Bird):\n def Dance(self):\n print(\"Sparrow can't dance\")\n\n\nfirst_bird = peaCock()\nfirst_bird.Dance()\n\n" }, { "alpha_fraction": 0.5401069521903992, "alphanum_fraction": 0.625668466091156, "avg_line_length": 45.5, "blob_id": "b368cf11bda789a6003695c2b3a9ce98b101608f", "content_id": "9004658a7871a1703e6c74ece5db6d2608e1ea00", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 187, "license_type": "no_license", "max_line_length": 84, "num_lines": 4, "path": "/game/version1/util.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "import pyglet, math\n\ndef distance(point_1=(0,0), point_2=(0,0)): #return distance between two points\n return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] - point_2[1]) ** 2)\n\n" }, { "alpha_fraction": 0.6184698343276978, "alphanum_fraction": 0.6386922597885132, "avg_line_length": 24.799999237060547, "blob_id": "cdfca248594b7e1cccff24141bb5b3e8597b3436", "content_id": "c5e7599e1c79087fb0ddfd94e48caabb017e7308", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2967, "license_type": "no_license", "max_line_length": 137, "num_lines": 115, "path": "/game/version1/asteroid_1.py", "repo_name": "Pawel-9215/myStruggles", "src_encoding": "UTF-8", "text": "import pyglet\nimport math\n\npyglet.resource.path = ['../resources']\npyglet.resource.reindex()\n\nimport resources, load\nimport physicalobject\nimport player\nimport fx\npyglet.options['audio'] = ('openal', 'pulse', 'directsound', 'silent')\ngame_window = pyglet.window.Window(800, 800, caption = \"Meteora v0.01\")\n\nmain_batch = pyglet.graphics.Batch()\nscore = 0\nlives = 3\nscore_label0 = pyglet.text.Label(text=\"Score: \", x=10, y=game_window.height-24, batch=main_batch)\nscore_label = pyglet.text.Label(text=str(score), x=10, y=game_window.height-44, batch=main_batch)\n\nplayer_icons = load.player_lives(lives, batch=main_batch)\n\ndef update_score(score, label): \n label.text = str(score)\n label.draw()\n\n\nlevel_label = pyglet.text.Label(text=\"Meteor Game\", x=game_window.width//2, y=game_window.height-24, anchor_x='center', batch=main_batch)\n\nplayer_ship = player.Player(x=400, 
y=400, batch=main_batch)\nbackground = pyglet.sprite.Sprite(img=resources.background_image, x=0, y=0)\n\nasteroids = load.asteroids(5, player_ship.position, main_batch)\n\ngame_objects = asteroids + [player_ship]\ngame_fx = []\n\ntheme = pyglet.media.Player()\ntheme.queue(resources.music_theme)\ntheme.loop = True\n\ntheme.play()\n\n@game_window.event\ndef on_draw():\n game_window.clear()\n\n background.draw()\n \n main_batch.draw()\n for fx in game_fx:\n fx.draw()\n\n\n\ndef update(dt):\n \n to_add = []\n to_add_fx = []\n global score\n global lives\n global player_icons\n\n for i in range(len(game_objects)):\n for j in range(i+1, len(game_objects)):\n obj_1 = game_objects[i]\n obj_2 = game_objects[j]\n if not obj_1.dead and not obj_2.dead:\n if obj_1.collides_with(obj_2):\n obj_1.handle_collision_with(obj_2)\n obj_2.handle_collision_with(obj_1)\n\n for obj in game_objects:\n obj.update(dt)\n to_add.extend(obj.new_objects)\n to_add_fx.extend(obj.new_fx)\n obj.new_objects = []\n obj.new_fx = []\n\n game_objects.extend(to_add)\n game_fx.extend(to_add_fx)\n\n for fx in game_fx:\n fx.update(dt)\n\n for to_remove in [obj for obj in game_objects if obj.dead]:\n \n if to_remove.is_bullet == False:\n score += 1\n update_score(score, score_label)\n\n if to_remove.is_player == True and lives > 0:\n lives -= 1\n to_remove.x=400\n to_remove.y=400\n to_remove.velocity_x = 0\n to_remove.velocity_y = 0\n to_remove.rotation = 0\n to_remove.dead = False\n player_icons = load.player_lives(lives, batch=main_batch)\n return\n #test\n to_remove.delete()\n game_objects.remove(to_remove)\n\n for to_remove in [obj for obj in game_fx if obj.dead]:\n to_remove.delete()\n game_fx.remove(to_remove)\n\n \n\nif __name__ == '__main__':\n game_window.push_handlers(player_ship)\n game_window.push_handlers(player_ship.key_handler)\n pyglet.clock.schedule_interval(update, 1/120.0)\n pyglet.app.run()\n" } ]
14
KrzysztofNawara/ar-parallel-solver
https://github.com/KrzysztofNawara/ar-parallel-solver
e7c533ff3afdb31e0c7b28328d2e764798a439e5
cc007b36fb6a9b1da78b0b1f9c3acb7386e0fc4b
feb0082188064927b0b0ef27b25f510ad4bbac69
refs/heads/master
2021-08-19T19:58:47.059528
2017-11-27T09:28:34
2017-11-27T09:28:34
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5497075915336609, "alphanum_fraction": 0.5750487446784973, "avg_line_length": 33.20000076293945, "blob_id": "8b30534ac5994635ee2bbb61058c23cc0316a5da", "content_id": "1d2d13cc995b27f3878b3da20ce7e54f8b638f46", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 513, "license_type": "no_license", "max_line_length": 95, "num_lines": 15, "path": "/superc/pth/tests/t1_scheduler.py", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "\nimport os\nfrom common import *\n\n# Test 1 - check communication impact by keeping workload constant, but manipulating N/ts ratio\nTEST_ID = 1\n\nnode_counts = [1, 4, 9, 16]\n\nlog_dir, rdir = prepare_log_and_result_dirs_for_test(TEST_ID)\n\nfor nc in node_counts:\n os.system(run_batch_string(nc, scripts_dir + \"t{}_executor.py\".format(TEST_ID),\n log_prefix=log_dir + \"{}_nodes\".format(nc),\n time=\"00:25:00\",\n results_dir=rdir))" }, { "alpha_fraction": 0.7443249821662903, "alphanum_fraction": 0.7479091882705688, "avg_line_length": 51.375, "blob_id": "f7c31f23690ac6ddfac8287fbf3aec7318ec75df", "content_id": "0beb3ab9ade6928d9c859d797bb69f9e40b6ad4e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 837, "license_type": "no_license", "max_line_length": 128, "num_lines": 16, "path": "/README.md", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "Algorithm variants:\n- seq - sequential variant\n- parallel - separate buffers, not copied to back/front, if-ing in Workspace::get\n- parallel_lb - no overlapping + separate comm buffers, but contents copied to large buffer\n- parallel_async - overlapped computations, but separate buffers (copied to front/back)\n- parallel_gap - overlapping, all transfers to directly to front/back buffer\n- parallel_ts - gaped transfers directly to front/back buffer, with time intervals (fetch additional data to avoid communiation)\n\nWhat differs between variants:\n1. separate buffers, iffing [parallel] ->\n separate buffers, copying [_lb, _async]] ->\n integrated buffers, gapped transfers [_gap, _ts]\n\n2. non-overlapped transfers -> [parallel, _lb] -> overlapped [_async, _gap, _ts]\n\n3. 
no time intervals [everyhing but _ts] -> time intervals [_ts]" }, { "alpha_fraction": 0.6123963594436646, "alphanum_fraction": 0.6214228272438049, "avg_line_length": 23.238248825073242, "blob_id": "8c4066443bbbdc672bdcffac3d4b9d512f3b80df", "content_id": "4cdfd97d9b744a570c13755d323a5feed047e681", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 14956, "license_type": "no_license", "max_line_length": 109, "num_lines": 617, "path": "/src/parallel_async.cpp", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "\n#include <mpi.h>\n#include <exception>\n#include <iostream>\n#include <cmath>\n#include <cstring>\n#include \"shared.h\"\n\nconst int N_INVALID = -1;\n\nenum Neighbour {\n\tLEFT = 0,\n\tTOP = 1,\n\tRIGHT = 2,\n\tBOTTOM = 3,\n};\n\nclass ClusterManager : private NonCopyable {\npublic:\n\tClusterManager(const Coord N) : bitBucket(0) {\n\t\tMPI_Init(nullptr, nullptr);\n\t\tMPI_Comm_rank(comm, &nodeId);\n\t\tMPI_Comm_size(comm, &nodeCount);\n\n\t\tpartitioner = new Partitioner(nodeCount, 0.0, 1.0, N);\n\t\tsideLen = partitioner->get_nodes_grid_dimm();\n\t\tstd::tie(row, column) = partitioner->node_id_to_grid_pos(nodeId);\n\n\t\tinitNeighbours();\n\n\t\terr_log() << \"Cluster initialized successfully. I'm (\" << row << \",\" << column << \")\" << std::endl;\n\t}\n\n\t~ClusterManager() {\n\t\tdelete partitioner;\n\t\tMPI_Finalize();\n\t}\n\n\tPartitioner& getPartitioner() {return *partitioner;}\n\n\tint getNodeCount() { return nodeCount; }\n\tint getNodeId() { return nodeId; }\n\tstd::pair<NumType, NumType> getOffsets() { return partitioner->get_math_offset_node(row, column); };\n\tMPI_Comm getComm() { return comm; }\n\n\tstd::ostream& err_log() {\n\t\treturn std::cerr;\n\t}\n\n\tstd::ostream& master_err_log() {\n\t\tif(nodeId == 0) {\n\t\t\treturn std::cerr;\n\t\t} else {\n\t\t\treturn bitBucket;\n\t\t}\n\t}\n\n\tint* getNeighbours() {\n\t\treturn &neighbours[0];\n\t}\n\n\nprivate:\n\tconst static auto comm = MPI_COMM_WORLD;\n\n\tint nodeId;\n\tint nodeCount;\n\tint row;\n\tint column;\n\n\tPartitioner *partitioner;\n\n\tint sideLen;\n\tint neighbours[4];\n\n\tstd::ostream bitBucket;\n\n\tvoid initNeighbours() {\n\t\tif(row == 0) { neighbours[Neighbour::BOTTOM] = N_INVALID; }\n\t\telse { neighbours[Neighbour::BOTTOM] = nodeId-sideLen; }\n\n\t\tif(row == sideLen-1) { neighbours[Neighbour::TOP] = N_INVALID; }\n\t\telse { neighbours[Neighbour::TOP] = nodeId+sideLen; }\n\n\t\tif(column == 0) { neighbours[Neighbour::LEFT] = N_INVALID; }\n\t\telse { neighbours[Neighbour::LEFT] = nodeId-1; }\n\n\t\tif(column == sideLen-1) { neighbours[Neighbour::RIGHT] = N_INVALID; }\n\t\telse { neighbours[Neighbour::RIGHT] = nodeId+1; }\n\n\t\terr_log() << \"Neighbours: \"\n\t\t << \" LEFT: \" << neighbours[LEFT]\n\t\t << \" TOP: \" << neighbours[TOP]\n\t\t << \" RIGHT: \" << neighbours[RIGHT]\n\t\t << \" BOTTOM: \" << neighbours[BOTTOM] << std::endl;\n\t}\n};\n\n/*\n * Buffer exposal during async\n * I - start of innies calculation\n * O - start of outies calulations\n * s - swap, calculations finished for given iteration\n * out_r - recv, period of outer buffers exposal to the network\n * out_s - send, period of inner buffers exposal to the network\n *\n * I O s I O s\n * - out_r -| |-- out_r -|\n * - out_s -| |-- out_s -|\n *\n * receive (outer) - needed when calculating border values\n *\t* must be present when i-1 outies calculated\n *\t* can lie idle during subsequent outies calculation (assuming no memcpy)\n * send 
(inner)\n *\t* can be sent only when values calculated (happens right after outer become available)\n *\t* can be exposed only until outies from next iteration need to be calculated\n *\n * Memcpy impact?\n * Separate inner buffer: we don't have to wait with i+1 outies calculation until buffers are free (otherwise\n * we could overwrite data being sent)\n * Separate outer buffer: data required to carry out computations, but we can have a couple of spares with\n * outstanding receive request attached\n *\n * Single memcpied send buffer:\n * Allow to extend buffer exposure into outies calculation phase\n *\n * I O s I O s\n * - out_r -| |-- out_r -|\n * --out_m--|xxxx| memcpy-> out_s1\n * - out_s1 -----| |------------|\n *\n */\n\nclass Comms : private NonCopyable {\npublic:\n\tComms(const Coord innerLength) : innerLength(innerLength) {\n\t\treset_rqb(send_rqb, false);\n\t\treset_rqb(recv_rqb, false);\n\t}\n\n\t~Comms() {\n\t\t// cancel outstanding receives\n\t\treset_rqb(recv_rqb, true);\n\t}\n\n\tvoid wait_for_send() {\n\t\twait_for_rqb(send_rqb);\n\t}\n\n\tvoid wait_for_receives() {\n\t\twait_for_rqb(recv_rqb);\n\t}\n\n\t#define SCHEDULE_OP(OP, RQB) \\\n\t\tauto idx = RQB.second; \\\n\t\tauto* rq = RQB.first + idx; \\\n\t\tOP(buffer, innerLength, NUM_MPI_DT, nodeId, 1, MPI_COMM_WORLD, rq); \\\n\t\tRQB.second++;\n\t\n\tvoid schedule_send(int nodeId, NumType* buffer) {\n\t\t//DL( \"schedule send to \" << nodeId )\n\t\tSCHEDULE_OP(MPI_Isend, send_rqb)\n\t\t//DL( \"rqb afterwards\" << send_rqb.second )\n\t}\n\n\tvoid schedule_recv(int nodeId, NumType* buffer) {\n\t\t//DL( \"schedule receive from \" << nodeId )\n\t\tSCHEDULE_OP(MPI_Irecv, recv_rqb)\n\t\t//DL( \"rqb afterwards\" << recv_rqb.second )\n\t}\n\n\t#undef SCHEDULE_OP\n\nprivate:\n\tconst static int RQ_COUNT = 4;\n\tusing RqBuffer = std::pair<MPI_Request[RQ_COUNT], int>; \n\t\n\tconst Coord innerLength;\n\t\n\tRqBuffer send_rqb;\n\tRqBuffer recv_rqb;\n\n\tvoid reset_rqb(RqBuffer& b, bool pendingWarn) {\n\t\tfor(int i = 0; i < RQ_COUNT; i++) {\n\t\t\tif(b.first[i] != MPI_REQUEST_NULL) {\n\t\t\t\t/* commenting out because caused error:\n\t\t\t\t * Fatal error in PMPI_Cancel: Invalid MPI_Request, error stack:\n\t\t\t\t * PMPI_Cancel(201): MPI_Cancel(request=0x7ffc407347c8) failed\n\t\t\t\t * PMPI_Cancel(177): Null Request pointer\n\t\t\t\t */\n\t\t\t\t// MPI_Cancel(b.first + i);\n\t\t\t\tb.first[i] = MPI_REQUEST_NULL;\n\n\t\t\t\tif(pendingWarn) {\n\t\t\t\t\tstd::cerr << \"WARN: pending request left in the queue, cancelling it!\" << std::endl;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tb.second = 0;\n\t}\n\t\n\tvoid wait_for_rqb(RqBuffer& b) {\n\t\t//DL( \"waiting for rqb\" )\n\t\tfor(int i = 0; i < b.second; i++) {\n\t\t\t//DL( \"iteration: \" << i )\n\t\t\tint finished_idx;\n\t\t\tMPI_Waitany(b.second, b.first, &finished_idx, MPI_STATUSES_IGNORE);\n\t\t}\n\n\t\t//DL( \"finished waiting for rqb!\" )\n\t\treset_rqb(b, true);\n\t\t//DL( \"finished resettng rqb\" );\n\t}\n};\n\n\nstruct CSet {\n\tCSet(const Coord x = 0, const Coord y = 0) : x(x), y(y) {}\n\n\tCoord x;\n\tCoord y;\n\n\tbool operator==(const CSet &o) const {\n\t\treturn x == o.x && y == o.y;\n\t}\n\n\tbool operator!=(const CSet &o) const {\n\t\treturn !operator==(o);\n\t}\n\n\tconst std::string toStr() {\n\t\tstd::ostringstream oss;\n\t\toss << \"(\" << x << \",\" << y << \")\";\n\t\treturn oss.str();\n\t}\n};\n\nstruct AreaCoords {\n\tAreaCoords() {}\n\tAreaCoords(const CSet bottomLeft, const CSet upperRight) : bottomLeft(bottomLeft), upperRight(upperRight) {}\n\n\tCSet 
bottomLeft;\n\tCSet upperRight;\n\n\tconst std::string toStr() {\n\t\tstd::ostringstream oss;\n\t\toss << \"[ \" << bottomLeft.toStr() << \" | \" << upperRight.toStr() << \"]\";\n\t\treturn oss.str();\n\t}\n};\n\n/**\n * Return inclusive ranges !!!\n */\nclass WorkspaceMetainfo : private NonCopyable {\npublic:\n\tWorkspaceMetainfo(const Coord innerSize, const Coord boundaryWidth) {\n\t\tprecalculate(innerSize, boundaryWidth);\n\t}\n\n\tconst AreaCoords& working_workspace_area() const { return wwa; }\n\n\tconst AreaCoords& innies_space_area() const { return isa; };\n\n\t/**\n\t * That's how shared areas are divided:\n\t * ___________\n\t * | |_______| |\n\t * | | | |\n\t * | | | |\n\t * | |_______| |\n\t * |_|_______|_|\n\t */\n\tconst std::array<AreaCoords, 4>& shared_areas() const { return sha; }\n\t\nprivate:\n\tAreaCoords wwa;\n\tAreaCoords isa;\n\tstd::array<AreaCoords, 4> sha;\n\t\n\tvoid precalculate(const Coord innerSize, const Coord boundaryWidth) {\n\t\tconst auto lid = innerSize-1;\n\t\t\n\t\twwa.bottomLeft.x = 0;\n\t\twwa.bottomLeft.y = 0;\n\t\twwa.upperRight.x = lid;\n\t\twwa.upperRight.y = lid;\n\n\t\tisa.bottomLeft.x = boundaryWidth;\n\t\tisa.bottomLeft.y = boundaryWidth;\n\t\tisa.upperRight.x = lid - boundaryWidth;\n\t\tisa.upperRight.y = lid - boundaryWidth;\n\t\t\n\t\tsha = {\n\t\t\tAreaCoords(CSet(0, 0), CSet(boundaryWidth-1, lid)), // left\n\t\t\tAreaCoords(CSet(innerSize - boundaryWidth, 0), CSet(lid, lid)), // right\n\t\t\tAreaCoords(CSet(boundaryWidth, innerSize-boundaryWidth), CSet(lid-boundaryWidth, lid)), // top\n\t\t\tAreaCoords(CSet(boundaryWidth, 0), CSet(lid-boundaryWidth, boundaryWidth-1)), // bottom\n\t\t};\n\t}\n};\n\nvoid test_wmi() {\n\tWorkspaceMetainfo wmi(9, 2);\n\n\tauto work_area = wmi.working_workspace_area();\n\tauto innie = wmi.innies_space_area();\n\tauto in_bound = wmi.shared_areas();\n\n\t#define STR(X) std::cerr << X.toStr() << std::endl;\n\n\tassert(work_area.bottomLeft == CSet(0,0));\n\tassert(work_area.upperRight == CSet(8,8));\n\n\n\tassert(innie.bottomLeft == CSet(2,2));\n\tassert(innie.upperRight == CSet(6,6));\n\n\t// left\n\tassert(in_bound[0].bottomLeft == CSet(0,0));\n\tassert(in_bound[0].upperRight == CSet(1,8));\n\t// right\n\tassert(in_bound[1].bottomLeft == CSet(7,0));\n\tassert(in_bound[1].upperRight == CSet(8,8));\n\t// top\n\tassert(in_bound[2].bottomLeft == CSet(2,7));\n\tassert(in_bound[2].upperRight == CSet(6,8));\n\t// bottom\n\tassert(in_bound[3].bottomLeft == CSet(2,0));\n\tassert(in_bound[3].upperRight == CSet(6,1));\n\n\t#undef STR\n}\n\nvoid iterate_over_area(AreaCoords area, std::function<void(const Coord, const Coord)> f) {\n\tfor(Coord x_idx = area.bottomLeft.x; x_idx <= area.upperRight.x; x_idx++) {\n\t\tfor(Coord y_idx = area.bottomLeft.y; y_idx <= area.upperRight.y; y_idx++) {\n\t\t\tf(x_idx, y_idx);\n\t\t}\n\t}\n}\n\nclass Workspace : private NonCopyable {\npublic:\n\tWorkspace(const Coord innerSize, const Coord borderWidth, ClusterManager& cm, Comms& comm)\n\t\t\t: innerSize(innerSize), cm(cm), comm(comm), borderWidth(borderWidth)\n\t{\n\t\touterSize = innerSize+2*borderWidth;\n\t\tmemorySize = outerSize*outerSize;\n\n\t\tneigh = cm.getNeighbours();\n\t\tinitialize_buffers();\n\t}\n\n\t~Workspace() {\n\t\tfreeBuffers();\n\t}\n\n\tvoid set_elf(const Coord x, const Coord y, const NumType value) {\n\t\t*elAddress(x, y, front) = value;\n\t}\n\n\tNumType elb(const Coord x, const Coord y) {\n\t\treturn *elAddress(x,y,back);\n\t}\n\n\tCoord getInnerLength() {return innerSize;}\n\n\t/*\n\t * All 4 functions are called 
before swap() is invoked!\n\t * 2 first before outie calculations, last two after them\n\t */\n\n\tvoid ensure_out_boundary_arrived() {\n\t\tcomm.wait_for_receives();\n\t\tcopy_outer_buffer_to(back);\n\t}\n\n\tvoid ensure_in_boundary_sent() {\n\t\tcomm.wait_for_send();\n\t}\n\n\tvoid send_in_boundary() {\n\t\tcopy_from_x_to_inner_buffer(front);\n\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(neigh[i] != N_INVALID) {\n\t\t\t\tcomm.schedule_send(neigh[i], innerEdge[i]);\n\t\t\t}\n\t\t}\n\t}\n\n\tvoid start_wait_for_new_out_border() {\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(neigh[i] != N_INVALID) {\n\t\t\t\tcomm.schedule_recv(neigh[i], outerEdge[i]);\n\t\t\t}\n\t\t}\n\t}\n\n\tvoid swap() {\n\t\tswapBuffers();\n\t}\n\nprivate:\n\tClusterManager& cm;\n\tComms& comm;\n\tint* neigh;\n\n\tconst Coord innerSize;\n\tCoord outerSize;\n\tCoord memorySize;\n\n\tconst Coord borderWidth;\n\n\t/* horizontal could be stored with main buffer, but for convenience both horizontals and\n\t * verticals are allocated separatelly (and writes mirrored) */\n\tNumType* innerEdge[4];\n\t/* all outer edges are allocated separatelly; their length is innerLength, not innerLength + 2 */\n\tNumType* outerEdge[4];\n\tNumType *front;\n\tNumType *back;\n\n\tvoid initialize_buffers() {\n\t\tfront = new NumType[memorySize];\n\t\tback = new NumType[memorySize];\n\n\t\tfor(Coord i = 0; i < memorySize; i++) {\n\t\t\tfront[i] = 0.0;\n\t\t\tback[i] = 0.0;\n\t\t}\n\n\t\t/* create inner buffer (as comm buffers) for */\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(neigh[i] != N_INVALID) {\n\t\t\t\tinnerEdge[i] = new NumType[innerSize];\n\t\t\t\touterEdge[i] = new NumType[innerSize];\n\t\t\t} else {\n\t\t\t\tinnerEdge[i] = nullptr;\n\t\t\t\touterEdge[i] = nullptr;\n\t\t\t}\n\t\t}\n\t}\n\n\tvoid freeBuffers() {\n\t\tdelete[] front;\n\t\tdelete[] back;\n\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(innerEdge != nullptr) {\n\t\t\t\tdelete[] innerEdge[i];\n\t\t\t\tdelete[] outerEdge[i];\n\t\t\t}\n\t\t}\n\t}\n\n\tNumType* elAddress(const Coord x, const Coord y, NumType* base) {\n\t\treturn base + outerSize*(borderWidth + x) + (borderWidth + y);\n\t}\n\n\tvoid swapBuffers() {\n\t\tNumType* tmp = front;\n\t\tfront = back;\n\t\tback = tmp;\n\t}\n\n\tvoid copy_from_x_to_inner_buffer(NumType *x) {\n\t\t#define LOOP(EDGE, X, Y) \\\n\t\tif(neigh[EDGE] != N_INVALID) { \\\n\t\t\tfor(Coord i = 0; i < innerSize; i++) { \\\n\t\t\t\tinnerEdge[EDGE][i] = *elAddress(X,Y,x); \\\n\t\t\t} \\\n\t\t}\n\n\t\tLOOP(TOP, i, innerSize-1)\n\t\tLOOP(BOTTOM, i, 0)\n\t\tLOOP(LEFT, 0, i)\n\t\tLOOP(RIGHT, innerSize-1, i)\n\n\t\t#undef LOOP\n\t}\n\n\tvoid copy_outer_buffer_to(NumType *target) {\n\t\t#define LOOP(EDGE, X, Y) \\\n\t\tif(neigh[EDGE] != N_INVALID) { \\\n\t\t\tfor(Coord i = 0; i < innerSize; i++) { \\\n\t\t\t\t*elAddress(X,Y,target) = outerEdge[EDGE][i]; \\\n\t\t\t} \\\n\t\t}\n\n\t\tLOOP(TOP, i, innerSize)\n\t\tLOOP(BOTTOM, i, -1)\n\t\tLOOP(LEFT, -1, i)\n\t\tLOOP(RIGHT, innerSize, i)\n\n\t\t#undef LOOP\n\t}\n};\n\nstd::string filenameGenerator(int nodeId) {\n\tstd::ostringstream oss;\n\toss << \"./results/\" << nodeId << \"_t\";\n\treturn oss.str();\n}\n\nconst Coord BOUNDARY_WIDTH = 1;\n\nint main(int argc, char **argv) {\n\tstd::cerr << __FILE__ << std::endl;\n\n\tauto conf = parse_cli(argc, argv);\n\n\tClusterManager cm(conf.N);\n\tauto n_slice = cm.getPartitioner().get_n_slice();\n\tNumType x_offset, y_offset;\n\tstd::tie(x_offset, y_offset) = cm.getOffsets();\n\tauto h = cm.getPartitioner().get_h();\n\n\tComms comm(n_slice);\n\tWorkspace w(n_slice, 
BOUNDARY_WIDTH, cm, comm);\n\tWorkspaceMetainfo wi(n_slice, BOUNDARY_WIDTH);\n\n\tFileDumper<Workspace> d(filenameGenerator(cm.getNodeId()),\n\t n_slice,\n\t x_offset,\n\t y_offset,\n\t h,\n\t get_freq_sel(conf.timeSteps));\n\n\tTimer timer;\n\n\tMPI_Barrier(cm.getComm());\n\ttimer.start();\n\n\tauto ww_area = wi.working_workspace_area();\n\tauto wi_area = wi.innies_space_area();\n\tauto ws_area = wi.shared_areas();\n\n\tDL( \"filling boundary condition\" )\n\n\titerate_over_area(ww_area, [&w, x_offset, y_offset, h](const Coord x_idx, const Coord y_idx) {\n\t\tauto x = x_offset + x_idx*h;\n\t\tauto y = y_offset + y_idx*h;\n\t\tauto val = f(x,y);\n\t\tw.set_elf(x_idx,y_idx, val);\n\n\t\t/*\n\t\tstd::cerr << \"[\" << x_idx << \",\" << y_idx <<\"] \"\n\t\t\t << \"(\" << x << \",\" << y << \") -> \"\n\t\t\t << val << std::endl;\n */\n\t});\n\n\tDL( \"calculated boundary condition, initial communication\" )\n\n\t/* send our part of initial condition to neighbours */\n\tw.send_in_boundary();\n\tw.start_wait_for_new_out_border();\n\tw.swap();\n\n\tDL( \"initial communication done\" )\n\n\tauto eq_f = [&w](const Coord x_idx, const Coord y_idx) {\n\t\t// std::cerr << \"Entering Y loop, x y \" << y_idx << std::endl;\n\n\t\tauto eq_val = equation(\n\t\t\t\tw.elb(x_idx - 1, y_idx),\n\t\t\t\tw.elb(x_idx, y_idx - 1),\n\t\t\t\tw.elb(x_idx + 1, y_idx),\n\t\t\t\tw.elb(x_idx, y_idx + 1)\n\t\t);\n\n\t\tw.set_elf(x_idx, y_idx, eq_val);\n\t};\n\n\tfor(TimeStepCount ts = 0; ts < conf.timeSteps; ts++) {\n\t\tDL( \"Entering timestep loop, ts = \" << ts )\n\n\t\titerate_over_area(wi_area, eq_f);\n\t\tDL( \"Innies iterated, ts = \" << ts )\n\n\t\tw.ensure_out_boundary_arrived();\n\t\tDL( \"Out boundary arrived, ts = \" << ts )\n\t\tw.ensure_in_boundary_sent();\n\t\tDL( \"In boundary sent, ts = \" << ts )\n\n\t\tfor(auto a: ws_area) {\n\t\t\titerate_over_area(a, eq_f);\n\t\t}\n\n\t\tDL( \"Outies iterated, ts = \" << ts )\n\n\t\tw.send_in_boundary();\n\t\tDL( \"In boundary send scheduled, ts = \" << ts )\n\t\tw.start_wait_for_new_out_border();\n\n\t\tDL( \"Before swap, ts = \" << ts )\n\t\tw.swap();\n\n\t\tDL( \"Entering file dump\" )\n\t\tif (unlikely(conf.outputEnabled)) {\n\t\t\td.dumpBackbuffer(w, ts);\n\t\t}\n\t\tDL( \"After dump, ts = \" << ts )\n\t}\n\n\tMPI_Barrier(cm.getComm());\n\tauto duration = timer.stop();\n\n\tif(cm.getNodeId() == 0) {\n\t\tprint_result(\"parallel_async\", cm.getNodeCount(), duration, conf);\n\t\tstd::cerr << ((double)duration)/1000000000 << \" s\" << std::endl;\n\t}\n\n\tDL( \"Terminating\" )\n\n\treturn 0;\n}\n" }, { "alpha_fraction": 0.6070283651351929, "alphanum_fraction": 0.6197344064712524, "avg_line_length": 21.690908432006836, "blob_id": "4468ad772c81490efd7faa508f5be354966c0bf3", "content_id": "16180acbd0b76706ff3eef221bd1c7ad6c8d96f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 8736, "license_type": "no_license", "max_line_length": 113, "num_lines": 385, "path": "/src/shared.h", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "//\n// Created by blueeyedhush on 29.10.17.\n//\n\n#ifndef LAB1_SHARED_H\n#define LAB1_SHARED_H\n\n#include <mpi.h>\n#include <getopt.h>\n#include <cassert>\n#include <limits>\n#include <cmath>\n#include <iostream>\n#include <sstream>\n#include <functional>\n#include <fstream>\n#include \"NonCopyable.h\"\n\n// #define DEBUG\n\n#ifdef DEBUG\n\t#define DL(X) std::cerr << X << std::endl;\n\t#define DBG_ONLY(X) X;\n#else\n\t#define DL(X) (void)0;\n\t#define 
DBG_ONLY(X) (void)0;\n#endif\n\nusing Coord = long long;\nusing TimeStepCount = size_t;\nusing NumType = double;\nconst MPI_Datatype NUM_MPI_DT = MPI_DOUBLE;\nconst auto NumPrecision = std::numeric_limits<NumType>::max_digits10;\nusing Duration = long long;\n\nconst Coord KEEP_X_POINTS = 25;\nconst TimeStepCount KEEP_X_TIMEFRAMES = 100;\n\n/* 0 1\n * _*_*_*_*_ _*_*_*_*_\n * * | * * * * | * * * * | *\n * * | * * * * | * * * * | *\n *\n * That's how we partition vertices across nodes; external vertices are boundary conditions\n * - innerSize - grid length without boundary conditions\n * - outerSize - grid length including boundary conditions\n * Non-edge nodes instead of boundary conditions have values copied over from other nodes.\n *\n * nodes in each partition are indexed from 0 to n_partitioned-1\n * global offset of nodes in partition is (0, n_partitioned)\n * but if we want math offset, we get (0, (n_partitioned+1)*h) /because of boundary being outside/\n *\n * Node-to-carthesian system mapping:\n * | 2 | 3 |\n * | 0 | 1 |\n * node_row no -> y\n * node_column no -> x\n * _don't mix indexing within workspace/any other array into it_ (just remember how you did it and be consistent)\n *\n * WRONG!\n * if points in-between, then we have h/2 from the boundary\n * it shouldn't be a problem, since boundary is 0 everywhere? (but outside, not inside)\n * additionally, if we divide into 40 intervals, we have 39 (not 38 points) in between\n * if we have 0 and 1 aligned with points, we don't have such problems\n *\n * N = 4\n * outer = 6\n * on [0,1] interval\n * divided by 6 we get 1/6th...\n * if we have 4 points in the middle, we should divide by 5 (6 points == 5 intervals)!!!\n *\n */\nclass Partitioner : private NonCopyable {\n\t/* Responsibilities\n\t * - check for partitioning correctness\n\t * - get index and numerical offsets\n\t */\npublic:\n\tPartitioner(const Coord node_count, const NumType lower_b, NumType upper_b, const Coord grid_dimm)\n\t\t\t: nodeCount(node_count), lowerB(lower_b), upperB(upper_b), grid_dimm(grid_dimm)\n\t{\n\t\th = (upperB - lowerB)/(grid_dimm+1);\n\t\tverify_values();\n\t}\n\n\tint get_n_slice() {\n\t\treturn mh;\n\t}\n\n\tCoord partition_inner_size() {\n\t\treturn grid_dimm/sideLen;\n\t}\n\n\tNumType get_h() {\n\t\treturn h;\n\t}\n\n\n\t/**\n\t * Math offsets across point grid\n\t */\n\tstd::pair<NumType, NumType> get_math_offset_node(const int node_row, const int node_column) {\n\t\tauto x = (node_column*mh + 1)*h;\n\t\tauto y = (node_row*mh + 1)*h;\n\t\treturn std::make_pair(x,y);\n\t};\n\n\tstd::pair<int, int> node_id_to_grid_pos(int nodeId) {\n\t\tconst auto row = nodeId/sideLen;\n\t\tconst auto column = nodeId%sideLen;\n\t\treturn std::make_pair(row, column);\n\t};\n\n\tint get_nodes_grid_dimm() {\n\t\treturn sideLen;\n\t}\n\nprivate:\n\t/* characteristics of node grid */\n\tconst int nodeCount;\n\tint sideLen;\n\tCoord row;\n\tCoord column;\n\n\t/* qualities linking both worlds */\n\tint mh;\n\n\t/* characteristics of stored values and point grids */\n\tconst NumType lowerB;\n\tconst NumType upperB;\n\tconst Coord grid_dimm;\n\tNumType h;\n\n\tvoid verify_values() {\n\t\tauto sqr = static_cast<long>(std::sqrt(nodeCount));\n\t\tif(sqr*sqr != nodeCount) {\n\t\t\tthrow std::runtime_error(\"numer of nodes must be square\");\n\t\t} else {\n\t\t\tsideLen = sqr;\n\t\t}\n\n\t\tif(grid_dimm % sideLen != 0) {\n\t\t\tthrow std::runtime_error(\"point grid len must be evenly divisible by machine grid len\");\n\t\t} else {\n\t\t\tmh = 
grid_dimm/sideLen;\n\t\t}\n\t}\n};\n\n/* for nice plot: N = 40, timeSteps = 400 */\nstruct Config {\n\tCoord N = 40;\n\tTimeStepCount timeSteps = 400;\n\tbool outputEnabled = false;\n};\n\nConfig parse_cli(int argc, char **argv) {\n\tConfig conf;\n\n\tint c;\n\twhile (1) {\n\t\tc = getopt(argc, argv, \"n:t:o\");\n\t\tif (c == -1)\n\t\t\tbreak;\n\n\t\tswitch (c) {\n\t\t\tcase 'n':\n\t\t\t\tconf.N = std::stoull(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 't':\n\t\t\t\tconf.timeSteps = std::stoull(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 'o':\n\t\t\t\tconf.outputEnabled = true;\n\t\t\t\tbreak;\n\t\t}\n\t}\n\n\tstd::cerr << \"N = \" << conf.N << \", timeSteps = \" << conf.timeSteps << \", output = \" << conf.outputEnabled\n\t << std::endl;\n\n\treturn conf;\n}\n\nauto get_freq_sel(const TimeStepCount stepsCount) {\n\tauto dumpEvery = std::max(stepsCount/KEEP_X_TIMEFRAMES, static_cast<unsigned long int>(1));\n\n\tstd::function<bool(const Coord)> f = [dumpEvery](const Coord t) {\n\t\treturn t % dumpEvery == 0;\n\t};\n\n\treturn f;\n}\n\nauto sel_first_k_policy(TimeStepCount k) {\n\tstd::function<bool(const Coord)> f = [k](const Coord t) mutable {\n\t\tif(k > 0) {\n\t\t\tk--;\n\t\t\treturn true;\n\t\t} else {\n\t\t\treturn false;\n\t\t}\n\t};\n\n\treturn f;\n}\n\n/**\n * It doesn't plot borders, so it always queries workspace from 0 to size-1\n */\ntemplate <typename W>\nclass FileDumper : private NonCopyable {\npublic:\n\tFileDumper(const std::string prefix,\n\t const Coord n_partition,\n\t const NumType offset_x,\n\t const NumType offset_y,\n\t const NumType step,\n\t std::function<bool(const TimeStepCount)> selector)\n\t\t\t: prefix(prefix), N(n_partition), offset_x(offset_x), offset_y(offset_y), step(step), sel(selector),\n\t\t\t nextDumpId(0) {}\n\n\tvoid dumpBackbuffer(W& w, const TimeStepCount it_time, const Coord keep_snapshots = KEEP_X_POINTS) {\n\n\t\tif(!sel(it_time)) {\n\t\t\treturn;\n\t\t}\n\n\t\tauto edgeLen = w.getInnerLength();\n\t\tauto step = std::max(edgeLen/keep_snapshots, static_cast<long long int>(1));\n\n\t\t#ifdef DEBUG\n\t\tstd::cerr << \"edgeLen: \" << edgeLen\n\t\t\t\t << \"keep_snapshots\" << keep_snapshots\n\t\t << \" step: \" << step\n\t\t << \" offset_x: \" << offset_x\n\t\t << \" offset_y: \" << offset_y\n\t\t << std::endl;\n\t\t#endif\n\n\t\tif(step < 1) {\n\t\t\tthrow std::runtime_error(\"FileDumper: step == 0 -> infinite iteration\");\n\t\t}\n\n\t\tfilename.str(\"\");\n\t\tfilename << prefix << \"_\" << nextDumpId;\n\t\tauto fname = filename.str();\n\n\t\tstd::ofstream file;\n\t\tfile.open(fname);\n\t\tfile.precision(NumPrecision);\n\n\t\tDL( \"dumping\" )\n\n\t\tloop(edgeLen, step, [=, &w, &file](const Coord i) {\n\t\t\tloop(edgeLen, step, [=, &w, &file](const Coord j) {\n\t\t\t\tauto x = vr_x(i);\n\t\t\t\tauto y = vr_y(j);\n\t\t\t\tfile << x << \" \" << y << \" \" << it_time << \" \" << w.elb(i,j) << std::endl;\n\t\t\t});\n\n\t\t\tfile << std::endl;\n\t\t});\n\n\n\t\tDL( \"dump finished\" )\n\n\t\tfile.close();\n\n\t\tnextDumpId++;\n\t}\n\nprivate:\n\tconst std::string prefix;\n\tconst Coord N;\n\tstd::ostringstream filename;\n\n\tstd::function<bool(TimeStepCount)> sel;\n\tsize_t nextDumpId;\n\n\tconst NumType offset_x;\n\tconst NumType offset_y;\n\tconst NumType step;\n\n\tvoid loop(const Coord limit, const Coord step, std::function<void(const Coord)> f) {\n\t\tbool iShouldContinue = true;\n\t\tsize_t i = 0;\n\n\t\twhile(iShouldContinue) {\n\t\t\tif(i >= limit) {\n\t\t\t\tiShouldContinue = false;\n\n\t\t\t\t/* should we do one more iteration with variable exact to 
limit-1? */\n\t\t\t\tif(i - limit > step/4) {\n\t\t\t\t\ti = limit-1;\n\t\t\t\t} else {\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tf(i);\n\n\t\t\ti += step;\n\t\t}\n\t}\n\n\tNumType vr_x(const Coord idx) {\n\t\treturn offset_x + idx*step;\n\t}\n\n\tNumType vr_y(const Coord idx) {\n\t\treturn offset_y + idx*step;\n\t}\n};\n\n\nclass Timer : private NonCopyable {\npublic:\n\tTimer() {\n\t\tresetTm(tm);\n\t\tclock_getres(CLOCK, &tm);\n\t\tstd::cerr << \"Clock resolution: \" << tm.tv_sec << \" s \" << tm.tv_nsec << \" ns\" << std::endl;\n\t\tresetTm(tm);\n\t}\n\n\tvoid start() {\n\t\tclock_gettime(CLOCK, &tm);\n\t}\n\n\tDuration stop() {\n\t\ttimespec endTm;\n\t\tresetTm(endTm);\n\n\t\tclock_gettime(CLOCK, &endTm);\n\n\t\tDuration start = conv(tm);\n\t\tDuration end = conv(endTm);\n\n\t\treturn end - start;\n\t}\n\nprivate:\n\tconst static auto CLOCK = CLOCK_MONOTONIC;\n\ttimespec tm;\n\n\tvoid resetTm(timespec& t) {\n\t\tt.tv_sec = 0;\n\t\tt.tv_nsec = 0;\n\t}\n\n\t/**\n\t * @return value in us\n\t */\n\tDuration conv(timespec& t) {\n\t\treturn t.tv_sec*1000000000 + t.tv_nsec;\n\t}\n};\n\n\nvoid print_result(std::string algo_name, int nodeCount, long long int duration, Config c) {\n\tstd::cout << algo_name << \"\\t\"\n\t << nodeCount << \"\\t\"\n\t << c.timeSteps << \"\\t\"\n\t << c.N << \"\\t\"\n\t << duration/1000000 << std::endl;\n}\n\n/*\n * Must be defined on (0.0, 1.0)x(0.0, 1.0) surface\n */\nNumType f(NumType x, NumType y) {\n\treturn sin(M_PI*x)*sin(M_PI*y);\n}\n\nNumType equation(const NumType v_i_j, const NumType vi_j, const NumType v_ij, const NumType vij) {\n\tauto val = 0.25*(v_i_j + v_ij + vi_j + vij);\n\t// DL( \"(\" << v_i_j << \",\" << vi_j << \",\" << v_ij << \",\" << vij << \",\" << val << \")\" )\n\treturn val;\n}\n\n\n#define likely(x) __builtin_expect((x), 1)\n#define unlikely(x) __builtin_expect((x), 0)\n\n#endif //LAB1_SHARED_H\n" }, { "alpha_fraction": 0.5569620132446289, "alphanum_fraction": 0.5685654282569885, "avg_line_length": 23.33333396911621, "blob_id": "c126e57ef2c04c84f5138e93152ada2c3b042a80", "content_id": "bb5b9a0bd685082e8d2a805bad11abd5c2415db8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 948, "license_type": "no_license", "max_line_length": 72, "num_lines": 39, "path": "/merge_results.py", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\nimport StringIO\nimport sys\n\nNODE_COUNT = int(sys.argv[1])\nT_COUNT = int(sys.argv[2])\n\ndef pread(filename):\n return pd.read_csv(\"cmake-build-release/results-parts/\" + filename, \n delim_whitespace=True, header=None)\n\nconcated = []\nfor i in range(0, T_COUNT):\n stime = []\n for n in range(0,NODE_COUNT):\n stime.append(pread(\"{}_t_{}\".format(n, i)))\n \n c = pd.concat(stime)\n concated.append(c)\n\nfor i, c in zip(range(0, len(concated)), concated):\n dses = []\n\n dx = c.sort_values([0,1,2,3])\n for key in dx[0].unique():\n dw = dx[dx[0] == key]\n dses.append(dw)\n \n strings = []\n for d in dses:\n s = StringIO.StringIO()\n d.to_csv(s, header=None, sep=' ', index=False)\n strings.append(s.getvalue())\n\n c = \"\\n\".join(strings)\n f = open('cmake-build-release/results/t_{}'.format(i), 'w')\n f.write(c)\n f.close()" }, { "alpha_fraction": 0.5907196402549744, "alphanum_fraction": 0.598179042339325, "avg_line_length": 21.84461212158203, "blob_id": "9efc99385dc52ea101140ca93bb48f623b10fb72", "content_id": "2a194278038d3220cb60ae2ee6feab5d944a5c03", "detected_licenses": 
[], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 9116, "license_type": "no_license", "max_line_length": 108, "num_lines": 399, "path": "/src/parallel.cpp", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "\n#include <mpi.h>\n#include <exception>\n#include <iostream>\n#include <cmath>\n#include <cstring>\n#include \"shared.h\"\n\n/**\n * ToDo\n * - log prefixing not needed!\n */\n\nconst int N_INVALID = -1;\n\nenum Neighbour {\n\tLEFT = 0,\n\tTOP = 1,\n\tRIGHT = 2,\n\tBOTTOM = 3,\n};\n\nclass ClusterManager {\npublic:\n\tClusterManager(const Coord N) : bitBucket(0) {\n\t\tMPI_Init(nullptr, nullptr);\n\t\tMPI_Comm_rank(comm, &nodeId);\n\t\tMPI_Comm_size(comm, &nodeCount);\n\n\t\tpartitioner = new Partitioner(nodeCount, 0.0, 1.0, N);\n\t\tsideLen = partitioner->get_nodes_grid_dimm();\n\t\tstd::tie(row, column) = partitioner->node_id_to_grid_pos(nodeId);\n\n\t\tinitNeighbours();\n\n\t\terr_log() << \"Cluster initialized successfully. I'm (\" << row << \",\" << column << \")\" << std::endl;\n\t}\n\n\t~ClusterManager() {\n\t\tdelete partitioner;\n\t\tMPI_Finalize();\n\t}\n\n\tPartitioner& getPartitioner() {return *partitioner;}\n\n\tint getNodeCount() { return nodeCount; }\n\tint getNodeId() { return nodeId; }\n\tstd::pair<NumType, NumType> getOffsets() { return partitioner->get_math_offset_node(row, column); };\n\tMPI_Comm getComm() { return comm; }\n\n\tstd::ostream& err_log() {\n\t\tstd::cerr << \"[\" << nodeId << \"] \";\n\t\treturn std::cerr;\n\t}\n\n\tstd::ostream& master_err_log() {\n\t\tif(nodeId == 0) {\n\t\t\tstd::cerr << \"[\" << nodeId << \"] \";\n\t\t\treturn std::cerr;\n\t\t} else {\n\t\t\treturn bitBucket;\n\t\t}\n\t}\n\n\tint* getNeighbours() {\n\t\treturn &neighbours[0];\n\t}\n\n\nprivate:\n\tconst static auto comm = MPI_COMM_WORLD;\n\n\tint nodeId;\n\tint nodeCount;\n\tint row;\n\tint column;\n\n\tPartitioner *partitioner;\n\n\tint sideLen;\n\tint neighbours[4];\n\n\tstd::ostream bitBucket;\n\n\tvoid initNeighbours() {\n\t\tif(row == 0) { neighbours[Neighbour::BOTTOM] = N_INVALID; }\n\t\telse { neighbours[Neighbour::BOTTOM] = nodeId-sideLen; }\n\n\t\tif(row == sideLen-1) { neighbours[Neighbour::TOP] = N_INVALID; }\n\t\telse { neighbours[Neighbour::TOP] = nodeId+sideLen; }\n\n\t\tif(column == 0) { neighbours[Neighbour::LEFT] = N_INVALID; }\n\t\telse { neighbours[Neighbour::LEFT] = nodeId-1; }\n\n\t\tif(column == sideLen-1) { neighbours[Neighbour::RIGHT] = N_INVALID; }\n\t\telse { neighbours[Neighbour::RIGHT] = nodeId+1; }\n\n\t\terr_log() << \"Neighbours: \"\n\t << \" LEFT: \" << neighbours[LEFT]\n\t << \" TOP: \" << neighbours[TOP]\n\t << \" RIGHT: \" << neighbours[RIGHT]\n\t << \" BOTTOM: \" << neighbours[BOTTOM] << std::endl;\n\t}\n};\n\nclass Comms {\npublic:\n\tComms(const Coord innerLength) : innerLength(innerLength) {\n\t\treset();\n\t}\n\n\tvoid exchange(int targetId, NumType* sendBuffer, NumType* receiveBuffer) {\n\t\tMPI_Isend(sendBuffer, innerLength, NUM_MPI_DT, targetId, 1, MPI_COMM_WORLD, rq + nextId);\n\t\tMPI_Irecv(receiveBuffer, innerLength, NUM_MPI_DT, targetId, MPI_ANY_TAG, MPI_COMM_WORLD, rq + nextId + 1);\n\n\t\tnextId += 2;\n\t}\n\n\tvoid wait() {\n\t\tDL( \"NextId: \" << nextId )\n\t\tfor(int i = 0; i < nextId; i++) {\n\t\t\tint finished;\n\t\t\tMPI_Waitany(nextId, rq, &finished, MPI_STATUSES_IGNORE);\n\t\t\tDL( \"Finished \" << finished << \". 
Already done \" << i+1 )\n\t\t}\n\t\tDL( \"Wait finished\" )\n\t}\n\n\tvoid reset() {\n\t\tfor(int i = 0; i < RQ_COUNT; i++) {\n\t\t\trq[i] = MPI_REQUEST_NULL;\n\t\t}\n\t\tnextId = 0;\n\t}\n\nprivate:\n\tconst static int RQ_COUNT = 8;\n\tconst Coord innerLength;\n\tMPI_Request rq[RQ_COUNT];\n\tint nextId;\n};\n\n\nclass Workspace {\npublic:\n\tWorkspace(const Coord innerSize, const NumType borderCond, ClusterManager& cm, Comms& comm)\n\t\t\t: innerLength(innerSize), actualSize(innerSize*innerSize), cm(cm), borderCond(borderCond), comm(comm)\n\t{\n\t\tneigh = cm.getNeighbours();\n\t\tfillBuffers();\n\t}\n\n\t~Workspace() {\n\t\tfreeBuffers();\n\t}\n\n\tvoid set_elf(const Coord x, const Coord y, const NumType value) {\n\t\t// copying to send buffers occurs during comms phase\n\t\tfront[x*innerLength+y] = value;\n\t}\n\n\tNumType elb(const Coord x, const Coord y) {\n\t\tif(x == -1) {\n\t\t\tif(y == -1) {\n\t\t\t\t// conrner - invalid query, we never ask about it\n\t\t\t\tthrow std::runtime_error(\"corner access!\");\n\t\t\t} else if (y == innerLength) {\n\t\t\t\t// corner - invalid query\n\t\t\t\tthrow std::runtime_error(\"corner access!\");\n\t\t\t} else {\n\t\t\t\t// left outer border\n\t\t\t\tif(neigh[LEFT] != N_INVALID) {\n\t\t\t\t\treturn outerEdge[LEFT][y];\n\t\t\t\t} else {\n\t\t\t\t\treturn borderCond;\n\t\t\t\t}\n\t\t\t}\t\n\t\t} else if (x == innerLength) {\n\t\t\tif(y == -1) {\n\t\t\t\t// conrner - invalid query, we never ask about it\n\t\t\t\tthrow std::runtime_error(\"corner access!\");\n\t\t\t} else if (y == innerLength) {\n\t\t\t\t// corner - invalid query\n\t\t\t\tthrow std::runtime_error(\"corner access!\");\n\t\t\t} else {\n\t\t\t\t// right outer border\n\t\t\t\tif(neigh[RIGHT] != N_INVALID) {\n\t\t\t\t\treturn outerEdge[RIGHT][y];\n\t\t\t\t} else {\n\t\t\t\t\treturn borderCond;\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tif(y == -1) {\n\t\t\t\tif(neigh[BOTTOM] != N_INVALID) {\n\t\t\t\t\treturn outerEdge[BOTTOM][x];\n\t\t\t\t} else {\n\t\t\t\t\treturn borderCond;\n\t\t\t\t}\n\t\t\t} else if (y == innerLength) {\n\t\t\t\tif(neigh[TOP] != N_INVALID) {\n\t\t\t\t\treturn outerEdge[TOP][x];\n\t\t\t\t} else {\n\t\t\t\t\treturn borderCond;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// coords within main area\n\t\t\t\treturn back[x*innerLength+y];\n\t\t\t}\n\t\t}\n\t}\n\n\tCoord getInnerLength() {return innerLength;}\n\n\tvoid swap(bool comms = true) {\n\t\tif(comms) {\n\t\t\tcopyInnerEdgesToBuffers();\n\n\t\t\tcomm.reset();\n\t\t\tfor(int i = 0; i < 4; i++) {\n\t\t\t\tauto iThNeigh = neigh[i];\n\t\t\t\tif(iThNeigh != N_INVALID) {\n\t\t\t\t\tcomm.exchange(iThNeigh, innerEdge[i], outerEdge[i]);\n\t\t\t\t}\n\t\t\t}\n\t\t\tcomm.wait();\n\t\t}\n\n\t\tswapBuffers();\n\t}\n\nprivate:\n\tClusterManager& cm;\n\tComms& comm;\n\tint* neigh;\n\n\tconst Coord innerLength;\n\tconst Coord actualSize;\n\n\tconst NumType borderCond;\n\n\t/* horizontal could be stored with main buffer, but for convenience both horizontals and\n\t * verticals are allocated separatelly (and writes mirrored) */\n\tNumType* innerEdge[4];\n\t/* all outer edges are allocated separatelly; their length is innerLength, not innerLength + 2 */\n\tNumType* outerEdge[4];\n\tNumType *front;\n\tNumType *back;\n\n\tvoid fillBuffers() {\n\t\tfront = new NumType[actualSize];\n\t\tback = new NumType[actualSize];\n\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(neigh[i] != N_INVALID) {\n\t\t\t\tinnerEdge[i] = new NumType[innerLength];\n\t\t\t\touterEdge[i] = new NumType[innerLength];\n\t\t\t} else {\n\t\t\t\tinnerEdge[i] = 
nullptr;\n\t\t\t\touterEdge[i] = nullptr;\n\t\t\t}\n\t\t}\n\n\t\t// flipping buffers requires to flip poitners to inner/outer ones (if part is shared -> don't share?)\n\t\t// outer buffers are only associated with back buffer, but inner are mainly associated with both\n\t}\n\n\tvoid freeBuffers() {\n\t\tdelete[] front;\n\t\tdelete[] back;\n\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(innerEdge != nullptr) {\n\t\t\t\tdelete[] innerEdge[i];\n\t\t\t\tdelete[] outerEdge[i];\n\t\t\t}\n\t\t}\n\t}\n\n\tNumType* elAddress(const Coord x, const Coord y, NumType* base) {\n\t\treturn base + innerLength*x + y;\n\t}\n\n\tvoid swapBuffers() {\n\t\tNumType* tmp = front;\n\t\tfront = back;\n\t\tback = tmp;\n\t}\n\n\tvoid copyInnerEdgesToBuffers() {\n\t\t#define LOOP(EDGE, X, Y, BUFF) \\\n\t\tif(neigh[EDGE] != N_INVALID) { \\\n\t\t\tfor(Coord i = 0; i < innerLength; i++) { \\\n\t\t\t\tinnerEdge[EDGE][i] = *elAddress(X,Y,BUFF); \\\n\t\t\t} \\\n\t\t}\n\n\t\tLOOP(TOP, i, innerLength-1, front)\n\t\tLOOP(BOTTOM, i, 0, front)\n\t\tLOOP(LEFT, 0, i, front)\n\t\tLOOP(RIGHT, innerLength-1, i, front)\n\n\t\t#undef LOOP\n\t}\n};\n\nstd::string filenameGenerator(int nodeId) {\n\tstd::ostringstream oss;\n\toss << \"./results/\" << nodeId << \"_t\";\n\treturn oss.str();\n}\n\nint main(int argc, char **argv) {\n\tstd::cerr << __FILE__ << std::endl;\n\n\tauto conf = parse_cli(argc, argv);\n\n\tClusterManager cm(conf.N);\n\tauto n_slice = cm.getPartitioner().get_n_slice();\n\tNumType x_offset, y_offset;\n\tstd::tie(x_offset, y_offset) = cm.getOffsets();\n\tauto h = cm.getPartitioner().get_h();\n\n\tComms comm(n_slice);\n\tWorkspace w(n_slice, 0.0, cm, comm);\n\n\tFileDumper<Workspace> d(filenameGenerator(cm.getNodeId()),\n\t n_slice,\n\t x_offset,\n\t y_offset,\n\t h,\n\t get_freq_sel(conf.timeSteps));\n\n\tTimer timer;\n\n\tMPI_Barrier(cm.getComm());\n\ttimer.start();\n\n\tfor(Coord x_idx = 0; x_idx < n_slice; x_idx++) {\n\t\tfor(Coord y_idx = 0; y_idx < n_slice; y_idx++) {\n\t\t\tauto x = x_offset + x_idx*h;\n\t\t\tauto y = y_offset + y_idx*h;\n\t\t\tauto val = f(x,y);\n\t\t\tw.set_elf(x_idx,y_idx, val);\n\n\t\t\t#ifdef DEBUG\n\t\t\tstd::cerr << \"[\" << x_idx << \",\" << y_idx <<\"] \"\n\t\t\t << \"(\" << x << \",\" << y << \") -> \"\n\t\t\t << val << std::endl;\n\t\t\t#endif\n\t\t}\n\t}\n\n\tw.swap();\n\n\tfor(TimeStepCount ts = 0; ts < conf.timeSteps; ts++) {\n\t\tDL( \"Entering timestep loop, ts = \" << ts )\n\n\t\tfor(Coord x_idx = 0; x_idx < n_slice; x_idx++) {\n\t\t\tDL( \"Entering X loop, x = \" << x_idx )\n\n\t\t\tfor(Coord y_idx = 0; y_idx < n_slice; y_idx++) {\n\t\t\t\tDL( \"Entering Y loop, x y \" << y_idx )\n\n\t\t\t\tauto eq_val = equation(\n\t\t\t\t\t\tw.elb(x_idx - 1, y_idx),\n\t\t\t\t\t\tw.elb(x_idx, y_idx - 1),\n\t\t\t\t\t\tw.elb(x_idx + 1, y_idx),\n\t\t\t\t\t\tw.elb(x_idx, y_idx + 1)\n\t\t\t\t);\n\n\t\t\t\tw.set_elf(x_idx, y_idx, eq_val);\n\t\t\t}\n\t\t}\n\n\t\tDL( \"Before swap, ts = \" << ts )\n\n\t\tw.swap();\n\n\t\tDL( \"Entering file dump\" )\n\n\t\tif (unlikely(conf.outputEnabled)) {\n\t\t\td.dumpBackbuffer(w, ts);\n\t\t}\n\n\t\tDL( \"After dump, ts = \" << ts )\n\t}\n\n\tMPI_Barrier(cm.getComm());\n\tauto duration = timer.stop();\n\n\tif(cm.getNodeId() == 0) {\n\t\tprint_result(\"parallel\", cm.getNodeCount(), duration, conf);\n\t\tstd::cerr << ((double)duration)/1000000000 << \" s\" << std::endl;\n\t}\n\n\tDL( \"Terminating\" )\n\n\treturn 0;\n}\n" }, { "alpha_fraction": 0.5184381604194641, "alphanum_fraction": 0.5455531477928162, "avg_line_length": 14.913793563842773, "blob_id": 
"3640f466605c52baa51f8ef75324b29ce030f821", "content_id": "266b1e87b1887a982e16746835e0540be9878e48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 922, "license_type": "no_license", "max_line_length": 85, "num_lines": 58, "path": "/superc/pth/bootstrapper.sh", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nSCRIPT_DIR=\"$(readlink -e $(dirname ${BASH_SOURCE[0]}))\"\nBASE_DIR=\"$(readlink -e $(dirname ${BASH_SOURCE[0]})/../../)\"\n\nUSAGE=\"bootstrapper.sh <variant> <node_num> single|multiple <time_steps> <grid_size>\"\n\nif [ -z \"$1\" ]; then\n echo \"$USAGE\"\n exit 1\nfi\n\nif [ -z \"$2\" ]; then\n echo \"$USAGE\"\n exit 1\nelse\n PROCESS_COUNT=\"$2\"\nfi\n\nif [ -z \"$3\" ]; then\n echo \"$USAGE\"\nelse\n MODE=\"$3\"\nfi\n\n# \"$SCRIPT_DIR\"/build.sh \"$1\"\n\nrm -f \"$BASE_DIR\"/ar.se\nrm -f \"$BASE_DIR\"/ar.so\n\nMEM=\"16gb\"\n\nif [ \"$MODE\" == 'single' ]; then\n TPN=$PROCESS_COUNT\n NC=1\n OPT=\"-o\"\nelse\n NC=$PROCESS_COUNT\n TPN=1\n OPT=\"\"\nfi\n\nCMD=\"sbatch\n -J ar-1\n -N $NC\n --ntasks-per-node $TPN\n --mem $MEM\n --time=00:10:00\n -A ccbmc6\n -p plgrid-testing\n --output ar.so\n --error ar.se\n --mail-type=END,FAIL\n [email protected]\n $SCRIPT_DIR/run.sh $1 $4 $5 $OPT\"\n\necho \"$CMD\"\n$CMD" }, { "alpha_fraction": 0.6549865007400513, "alphanum_fraction": 0.6819406747817993, "avg_line_length": 18.578947067260742, "blob_id": "80f4bcd74b33bf424ecf98a54584224a4e87084f", "content_id": "7109fd156b2ff9c05af60cd19cf6bd16f9f43110", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 371, "license_type": "no_license", "max_line_length": 85, "num_lines": 19, "path": "/superc/pth/build.sh", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nmodule load tools/impi/2018\nmodule load plgrid/tools/cmake/3.7.2\n\npushd \"$HOME\"/ar-lab1/ > /dev/null\n\nmkdir -p cmake-build-release\npushd cmake-build-release > /dev/null\n\ncmake -DCMAKE_C_COMPILER=icc -DCMAKE_CXX_COMPILER=icpc -DCMAKE_BUILD_TYPE=Release ../\nif [ -z \"$1\" ]; then\n make all\nelse\n make \"$1\"\nfi\n\npopd > /dev/null\npopd > /dev/null" }, { "alpha_fraction": 0.636904776096344, "alphanum_fraction": 0.6473214030265808, "avg_line_length": 24.884614944458008, "blob_id": "d304ddfefe8b05034261b798e72f65cefb5f798c", "content_id": "21c95252bc7ce3639fd0fc205b29e0f43b917f7e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 672, "license_type": "no_license", "max_line_length": 105, "num_lines": 26, "path": "/xresults_from_superc.sh", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nREL_DIR=\"$(dirname \"${BASH_SOURCE[0]}\")\"\nDIR=\"$(readlink -e $REL_DIR)\"\n\nif [ $1 == \"zeus\" ]; then\n MACHINE=zeus.cyfronet.pl\nelif [ $1 == \"icm\" ]; then\n MACHINE=login.icm.edu.pl\nelif [ $1 == \"wcss\" ]; then\n MACHINE=\"bem.wcss.pl\"\nelif [ $1 == \"pth\" ]; then\n MACHINE=\"prometheus.cyfronet.pl\"\nelse\n echo \"usage: script.sh zeus|icm\"\n exit 1\nfi\n\nTARGET_DIR=\"$DIR/cmake-build-release/results-parts/\"\nrm -rf \"$TARGET_DIR\"\nmkdir -p \"$TARGET_DIR\"\n\nPASS=`cat $DIR/pass`\nsshpass -p \"$PASS\" rsync -avzr plgblueeyedhush@$MACHINE:ar-lab1/cmake-build-release/results \"$TARGET_DIR\"\nmv \"$TARGET_DIR\"/results/* \"$TARGET_DIR\"\nrmdir \"$TARGET_DIR\"/results" }, { "alpha_fraction": 
0.5801354646682739, "alphanum_fraction": 0.5959367752075195, "avg_line_length": 30.64285659790039, "blob_id": "c101dfb1ee9ad35a27a64a13ebd3dbce745b3778", "content_id": "4582bcc2e8552c94f5f2054c9a41208fab02c540", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 443, "license_type": "no_license", "max_line_length": 83, "num_lines": 14, "path": "/superc/pth/tests/t0_scheduler.py", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "\nimport os\nfrom common import *\n\n# Test 0 - run all algorithms on fixed size problem, one process per node\nTEST_ID = 0\n\nnode_counts = [1, 4, 9, 16]\n\nlog_dir, rdir = prepare_log_and_result_dirs_for_test(TEST_ID)\n\nfor nc in node_counts:\n os.system(run_batch_string(nc, scripts_dir + \"t{}_executor.py\".format(TEST_ID),\n log_prefix=log_dir + \"{}_nodes\".format(nc),\n results_dir=rdir))" }, { "alpha_fraction": 0.7721893787384033, "alphanum_fraction": 0.7781065106391907, "avg_line_length": 29.75757598876953, "blob_id": "e36d0a4fc675a5064234085c2e17798f576bf5c2", "content_id": "90685a2b9a3b0e165e776f923e5c7dbd14fad563", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "CMake", "length_bytes": 1014, "license_type": "no_license", "max_line_length": 56, "num_lines": 33, "path": "/CMakeLists.txt", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "cmake_minimum_required(VERSION 3.7.2)\nproject(lab1)\n\nset(CMAKE_CXX_STANDARD 14)\n\n# sequential variant\nset(SEQ_SOURCE_FILES src/seq.cpp)\nadd_executable(seq ${SEQ_SOURCE_FILES})\n\n# parallel variant\nfind_package(MPI REQUIRED)\n\ninclude_directories(${MPI_INCLUDE_PATH})\n\nset(PAR_SOURCE_FILES src/parallel.cpp)\nadd_executable(parallel ${PAR_SOURCE_FILES})\ntarget_link_libraries(parallel ${MPI_LIBRARIES})\n\nset(PAR_LB_SOURCE_FILES src/parallel_lb.cpp)\nadd_executable(parallel_lb ${PAR_LB_SOURCE_FILES})\ntarget_link_libraries(parallel_lb ${MPI_LIBRARIES})\n\nset(PAR_ASYNC_SOURCE_FILES src/parallel_async.cpp)\nadd_executable(parallel_async ${PAR_ASYNC_SOURCE_FILES})\ntarget_link_libraries(parallel_async ${MPI_LIBRARIES})\n\nset(PAR_GAP_SOURCE_FILES src/parallel_gap.cpp)\nadd_executable(parallel_gap ${PAR_GAP_SOURCE_FILES})\ntarget_link_libraries(parallel_gap ${MPI_LIBRARIES})\n\nset(PAR_TS_SOURCE_FILES src/parallel_ts.cpp)\nadd_executable(parallel_ts ${PAR_TS_SOURCE_FILES})\ntarget_link_libraries(parallel_ts ${MPI_LIBRARIES})" }, { "alpha_fraction": 0.5733082890510559, "alphanum_fraction": 0.5911654233932495, "avg_line_length": 23.18181800842285, "blob_id": "0b933f78e3780793078ed06f251f4116c858d92d", "content_id": "7c88c23af56b1a43fe6ff92b3de1bbd400a31ad9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1064, "license_type": "no_license", "max_line_length": 114, "num_lines": 44, "path": "/pth_preview.sh", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nset -e\n\nSCRIPT_DIR=\"$(readlink -e $(dirname ${BASH_SOURCE[0]}))\"\nPARTS=\"$SCRIPT_DIR\"/cmake-build-release/results-parts/\nFINAL=\"$SCRIPT_DIR\"/cmake-build-release/results/\nFRAMES=\"$SCRIPT_DIR\"/cmake-build-release/frames/\n\nmkdir -p \"$PARTS\"\nmkdir -p \"$FINAL\"\n\nif [ -z \"$2\" ]; then\n rm -f \"$PARTS\"/*\n rm -f \"$FINAL\"/*\n\n # download from PTH\n \"$SCRIPT_DIR\"/xresults_from_superc.sh pth\nfi\n\n# find highest node\nN=`find cmake-build-release/results-parts/ -printf \"%f\\n\" | 
sed -E 's/([0-9])+_t_[0-9]+/\\1/' | sort -g | tail -n1`\nNn=$(( N+1 ))\n\n# find highest t\nT=`find cmake-build-release/results-parts/ -printf \"%f\\n\" | sed -E 's/[0-9]+_t_([0-9]+)/\\1/' | sort -g | tail -n1`\nTn=$(( T+1 ))\n\necho \"Tn: $Tn; Nn: $Nn\"\n\n# python script\npython \"$SCRIPT_DIR\"/merge_results.py $Nn $Tn\n\n# plot\nif [ \"$1\" == \"a\" ]; then\n gnuplot -e \"n=$Tn\" \"$SCRIPT_DIR\"/plot_pth.gp\nelif [ \"$1\" == \"f\" ]; then\n mkdir -p \"$FRAMES\"\n rm -f \"$FRAMES\"/*\n gnuplot -e \"n=$Tn\" \"$SCRIPT_DIR\"/plot_pth_frames.gp\nelse\n echo \"frame or animation mode?\"\n exit 1\nfi\n" }, { "alpha_fraction": 0.5337597727775574, "alphanum_fraction": 0.5415778160095215, "avg_line_length": 22.065574645996094, "blob_id": "e47e497433c960de3b8c91ae397f51c7d5e1c4e2", "content_id": "e2d647e6f8d93d14702316790a3ca56eefadc3ed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1407, "license_type": "no_license", "max_line_length": 77, "num_lines": 61, "path": "/log_splitter.py", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "\nimport sys\nimport re\n\nREAD_STDIN = True\n\ndef splitter(sentence, mapping):\n split = re.split(\"\\[[0-9]+?\\]\", sentence)\n\n\n if len(split) > 1:\n groups = []\n for m in re.finditer(\"\\[([0-9]+?)\\]\", sentence):\n groups.append(m.group(1))\n\n if split[0]:\n # if first is non-empty, line didn't start with node id indicator\n mapping[\"_\"].append(split[0])\n mapping[\"_\"].append(\" (???) \\n\")\n\n for node_id, str in zip(groups, split[1:]):\n if node_id not in mapping:\n mapping[node_id] = []\n\n if str[0] == ' ':\n str = str[1:]\n\n mapping[node_id].append(str)\n\n\n\n else:\n mapping[\"_\"].append(sentence)\n\n return mapping\n\nif READ_STDIN:\n lines = sys.stdin.readlines()\nelse:\n f = open(\"ar.se\")\n lines = f.readlines()\n f.close()\n\nnode_to_log_mapping = {\"_\": []}\nfor line in lines:\n splitter(line, node_to_log_mapping)\n\nnode_to_str = {}\nfor k in sorted(node_to_log_mapping.iterkeys()):\n node_to_str[k] = \"\".join(node_to_log_mapping[k])\n\n# print to stdout everything\nfor k in sorted(node_to_str.iterkeys()):\n sys.stdout.write(\"~~~~ {} ~~~~\\n\".format(k))\n sys.stdout.write(node_to_str[k])\n sys.stdout.write(\"\\n\\n\")\n\n# print to separate files\nfor k in node_to_str.iterkeys():\n f = open(\"{}_out\".format(k), \"w\")\n f.write(node_to_str[k])\n f.close()" }, { "alpha_fraction": 0.6005051136016846, "alphanum_fraction": 0.6090704202651978, "avg_line_length": 24.471328735351562, "blob_id": "af360ec682f201536807411bff1e7e8dc6ea9f7f", "content_id": "09b6b82a4b3e410f23f01981cc4120e7baec8aa8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 18213, "license_type": "no_license", "max_line_length": 114, "num_lines": 715, "path": "/src/parallel_gap.cpp", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "\n#include <mpi.h>\n#include <exception>\n#include <iostream>\n#include <cmath>\n#include <cstring>\n#include <iomanip>\n#include \"shared.h\"\n\nconst int N_INVALID = -1;\n\nenum Neighbour {\n\tLEFT = 0,\n\tTOP = 1,\n\tRIGHT = 2,\n\tBOTTOM = 3,\n};\n\nclass ClusterManager : private NonCopyable {\npublic:\n\tClusterManager(const Coord N) : bitBucket(0) {\n\t\tMPI_Init(nullptr, nullptr);\n\t\tMPI_Comm_rank(comm, &nodeId);\n\t\tMPI_Comm_size(comm, &nodeCount);\n\n\t\tpartitioner = new Partitioner(nodeCount, 0.0, 1.0, N);\n\t\tsideLen = 
partitioner->get_nodes_grid_dimm();\n\t\tstd::tie(row, column) = partitioner->node_id_to_grid_pos(nodeId);\n\n\t\tinitNeighbours();\n\n\t\terr_log() << \"Cluster initialized successfully. I'm (\" << row << \",\" << column << \")\" << std::endl;\n\t}\n\n\t~ClusterManager() {\n\t\tdelete partitioner;\n\t\tMPI_Finalize();\n\t}\n\n\tPartitioner& getPartitioner() {return *partitioner;}\n\n\tint getNodeCount() { return nodeCount; }\n\tint getNodeId() { return nodeId; }\n\tstd::pair<NumType, NumType> getOffsets() { return partitioner->get_math_offset_node(row, column); };\n\tMPI_Comm getComm() { return comm; }\n\n\tstd::ostream& err_log() {\n\t\treturn std::cerr;\n\t}\n\n\tstd::ostream& master_err_log() {\n\t\tif(nodeId == 0) {\n\t\t\treturn std::cerr;\n\t\t} else {\n\t\t\treturn bitBucket;\n\t\t}\n\t}\n\n\tint* getNeighbours() {\n\t\treturn &neighbours[0];\n\t}\n\n\nprivate:\n\tconst static auto comm = MPI_COMM_WORLD;\n\n\tint nodeId;\n\tint nodeCount;\n\tint row;\n\tint column;\n\n\tPartitioner *partitioner;\n\n\tint sideLen;\n\tint neighbours[4];\n\n\tstd::ostream bitBucket;\n\n\tvoid initNeighbours() {\n\t\tif(row == 0) { neighbours[Neighbour::BOTTOM] = N_INVALID; }\n\t\telse { neighbours[Neighbour::BOTTOM] = nodeId-sideLen; }\n\n\t\tif(row == sideLen-1) { neighbours[Neighbour::TOP] = N_INVALID; }\n\t\telse { neighbours[Neighbour::TOP] = nodeId+sideLen; }\n\n\t\tif(column == 0) { neighbours[Neighbour::LEFT] = N_INVALID; }\n\t\telse { neighbours[Neighbour::LEFT] = nodeId-1; }\n\n\t\tif(column == sideLen-1) { neighbours[Neighbour::RIGHT] = N_INVALID; }\n\t\telse { neighbours[Neighbour::RIGHT] = nodeId+1; }\n\n\t\terr_log() << \"Neighbours: \"\n\t\t << \" LEFT: \" << neighbours[LEFT]\n\t\t << \" TOP: \" << neighbours[TOP]\n\t\t << \" RIGHT: \" << neighbours[RIGHT]\n\t\t << \" BOTTOM: \" << neighbours[BOTTOM] << std::endl;\n\t}\n};\n\n/*\n * Buffer exposal during async\n * I - start of innies calculation\n * O - start of outies calulations\n * s - swap, calculations finished for given iteration\n * out_r - recv, period of outer buffers exposal to the network\n * out_s - send, period of inner buffers exposal to the network\n *\n * I O s I O s\n * - out_r -| |-- out_r -|\n * - out_s -| |-- out_s -|\n *\n * receive (outer) - needed when calculating border values\n *\t* must be present when i-1 outies calculated\n *\t* can lie idle during subsequent outies calculation (assuming no memcpy)\n * send (inner)\n *\t* can be sent only when values calculated (happens right after outer become available)\n *\t* can be exposed only until outies from next iteration need to be calculated\n *\n * Memcpy impact?\n * Separate inner buffer: we don't have to wait with i+1 outies calculation until buffers are free (otherwise\n * we could overwrite data being sent)\n * Separate outer buffer: data required to carry out computations, but we can have a couple of spares with\n * outstanding receive request attached\n *\n * Single memcpied send buffer:\n * Allow to extend buffer exposure into outies calculation phase\n *\n * I O s I O s\n * - out_r -| |-- out_r -|\n * --out_m--|xxxx| memcpy-> out_s1\n * - out_s1 -----| |------------|\n *\n */\n\nclass Comms : private NonCopyable {\npublic:\n\tComms() {\n\t\treset_rqb(send_rqb, false);\n\t\treset_rqb(recv_rqb, false);\n\t}\n\n\t~Comms() {\n\t\t// cancel outstanding receives\n\t\treset_rqb(recv_rqb, true);\n\t}\n\n\tvoid wait_for_send() {\n\t\twait_for_rqb(send_rqb);\n\t}\n\n\tvoid wait_for_receives() {\n\t\twait_for_rqb(recv_rqb);\n\t}\n\n\t#define SCHEDULE_OP(OP, RQB) 
\\\n\t\tauto idx = RQB.second; \\\n\t\tauto* rq = RQB.first + idx; \\\n\t\tOP(buffer, size, type, nodeId, 1, MPI_COMM_WORLD, rq); \\\n\t\tRQB.second++;\n\n\tvoid schedule_send(int nodeId, NumType *buffer, Coord size, MPI_Datatype type) {\n\t\tDL( \"schedule send to \" << nodeId )\n\t\tSCHEDULE_OP(MPI_Isend, send_rqb)\n\t\tDL( \"rqb afterwards\" << send_rqb.second )\n\t}\n\n\tvoid schedule_recv(int nodeId, NumType *buffer, Coord size, MPI_Datatype type) {\n\t\tDL( \"schedule receive from \" << nodeId )\n\t\tSCHEDULE_OP(MPI_Irecv, recv_rqb)\n\t\tDL( \"rqb afterwards\" << recv_rqb.second )\n\t}\n\n\t#undef SCHEDULE_OP\n\nprivate:\n\tconst static int RQ_COUNT = 4;\n\tusing RqBuffer = std::pair<MPI_Request[RQ_COUNT], int>; \n\t\n\tRqBuffer send_rqb;\n\tRqBuffer recv_rqb;\n\n\tvoid reset_rqb(RqBuffer& b, bool pendingWarn) {\n\t\tfor(int i = 0; i < RQ_COUNT; i++) {\n\t\t\tif(b.first[i] != MPI_REQUEST_NULL) {\n\t\t\t\t/* commenting out because caused error:\n\t\t\t\t * Fatal error in PMPI_Cancel: Invalid MPI_Request, error stack:\n\t\t\t\t * PMPI_Cancel(201): MPI_Cancel(request=0x7ffc407347c8) failed\n\t\t\t\t * PMPI_Cancel(177): Null Request pointer\n\t\t\t\t */\n\t\t\t\t// MPI_Cancel(b.first + i);\n\t\t\t\tb.first[i] = MPI_REQUEST_NULL;\n\n\t\t\t\tif(pendingWarn) {\n\t\t\t\t\tstd::cerr << \"WARN: pending request left in the queue, cancelling it!\" << std::endl;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tb.second = 0;\n\t}\n\t\n\tvoid wait_for_rqb(RqBuffer& b) {\n\t\t//DL( \"waiting for rqb\" )\n\t\tfor(int i = 0; i < b.second; i++) {\n\t\t\t//DL( \"iteration: \" << i )\n\t\t\tint finished_idx;\n\t\t\tMPI_Waitany(b.second, b.first, &finished_idx, MPI_STATUSES_IGNORE);\n\t\t}\n\n\t\t//DL( \"finished waiting for rqb!\" )\n\t\treset_rqb(b, true);\n\t\t//DL( \"finished resettng rqb\" );\n\t}\n};\n\n\n/*\n * Vertical borders (y - external, x - internal)\n * ___________\n * |___________|\n * |y|x|___|x|y|\n * |y|x|___|x|y|\n * |y|x|___|x|y|\n * |___________|\n *\n * Horizontal borders\n * ___________\n * |__yyyyyyy__|\n * | |xxxxxxx| |\n * | | |___| | |\n * | |xxxxxxx| |\n * |__yyyyyyy__|\n *\n * In case of internal borders we have overlap, with external we don't\n */\n\nenum border_side {\n\tIN = 0,\n\tOUT = 4,\n};\n\nclass NeighboursCommProxy {\npublic:\n\tNeighboursCommProxy(int* neigh_mapping, \n\t const Coord innerLength, \n\t const Coord gap_width, \n\t std::function<Coord(const Coord, const Coord)> cm) : inner_size(innerLength)\n\t\t\t\n\t{\n\t\tconst auto outer_size = inner_size + 2*gap_width;\n\t\tconst auto nm = neigh_mapping;\n\n\t\tMPI_Type_vector(inner_size, gap_width, outer_size, NUM_MPI_DT, &vert_dt);\n\t\tMPI_Type_commit(&vert_dt);\n\n\t\t/* put here coordinates of the beginning; since storage is flipped horizontally, (0,0) /x,y/\n\t\t * is stored at the beginning, then (1,0), (2,0), ... 
(0,1) and so on\n\t\t */\n\t\tinfo[IN + LEFT] = comms_info(nm[LEFT], cm(0,0), vert_dt, 1);\n\t\tinfo[IN + RIGHT] = comms_info(nm[RIGHT], cm(inner_size-gap_width, 0), vert_dt, 1);\n\t\tinfo[IN + TOP] = comms_info(nm[TOP], cm(0,inner_size-1), NUM_MPI_DT, inner_size);\n\t\tinfo[IN + BOTTOM] = comms_info(nm[BOTTOM], cm(0,0), NUM_MPI_DT, inner_size);\n\n\t\tinfo[OUT + LEFT] = comms_info(nm[LEFT], cm(-1,0), vert_dt, 1);\n\t\tinfo[OUT + RIGHT] = comms_info(nm[RIGHT], cm(inner_size, 0), vert_dt, 1);\n\t\tinfo[OUT + TOP] = comms_info(nm[TOP], cm(0,inner_size), NUM_MPI_DT, inner_size);\n\t\tinfo[OUT + BOTTOM] = comms_info(nm[BOTTOM], cm(0,-1), NUM_MPI_DT, inner_size);\n\n\t\tDL( \"inner_size = \" << inner_size << \", gap_width = \" << gap_width << \", outer_size = \" << outer_size )\n\n\t\t#ifdef DEBUG\n\t\tfor(int i = 0; i < 8; i++) {\n\t\t\tstd::cerr << \"CommsInfo: node_id = \" << info[i].node_id << \", offset = \" << info[i].offset << \", type = \"\n\t\t\t << ((info[i].type == vert_dt) ? \"vert_dt\" : \"num_type\") << std::endl;\n\t\t}\n\t\t\t#endif\n\t}\n\n\t~NeighboursCommProxy() {\n\t\tMPI_Type_free(&vert_dt);\n\t}\n\n\tvoid schedule_send(Comms& c, Neighbour n, NumType* buffer) {\n\t\tauto& inf = info[IN + n];\n\t\tDL( \"proxy_send, neighbour: \" << n << \", bs: \" << bs << \", info_target: \" << inf.node_id << \", offset: \"\n\t\t << inf.offset << \", type = \" << ((inf.type == vert_dt) ? \"vert_dt\" : \"num_type\") )\n\t\tc.schedule_send(inf.node_id, buffer + inf.offset, inf.size, inf.type);\n\t}\n\n\tvoid schedule_recv(Comms& c, Neighbour n, NumType* buffer) {\n\t\tauto& inf = info[OUT + n];\n\t\tDL( \"proxy_recv, neighbour: \" << n << \", bs: \" << bs << \", info_target: \" << inf.node_id << \", offset: \"\n\t\t << inf.offset << \", type = \" << ((inf.type == vert_dt) ? 
\"vert_dt\" : \"num_type\") )\n\t\tc.schedule_recv(inf.node_id, buffer + inf.offset, inf.size, inf.type);\n\t}\n\nprivate:\n\tstruct comms_info {\n\t\tcomms_info() {}\n\t\tcomms_info(int nid, Coord offset, MPI_Datatype dt, Coord size)\n\t\t\t\t: offset(offset), type(dt), node_id(nid), size(size) {}\n\n\t\tCoord offset;\n\t\tMPI_Datatype type;\n\t\tint node_id;\n\t\tCoord size;\n\t};\n\n\tconst Coord inner_size;\n\tcomms_info info[8];\n\n\tMPI_Datatype vert_dt;\n};\n\n\nstruct CSet {\n\tCSet(const Coord x = 0, const Coord y = 0) : x(x), y(y) {}\n\n\tCoord x;\n\tCoord y;\n\n\tbool operator==(const CSet &o) const {\n\t\treturn x == o.x && y == o.y;\n\t}\n\n\tbool operator!=(const CSet &o) const {\n\t\treturn !operator==(o);\n\t}\n\n\tconst std::string toStr() {\n\t\tstd::ostringstream oss;\n\t\toss << \"(\" << x << \",\" << y << \")\";\n\t\treturn oss.str();\n\t}\n};\n\nstruct AreaCoords {\n\tAreaCoords() {}\n\tAreaCoords(const CSet bottomLeft, const CSet upperRight) : bottomLeft(bottomLeft), upperRight(upperRight) {}\n\n\tCSet bottomLeft;\n\tCSet upperRight;\n\n\tconst std::string toStr() {\n\t\tstd::ostringstream oss;\n\t\toss << \"[ \" << bottomLeft.toStr() << \" | \" << upperRight.toStr() << \"]\";\n\t\treturn oss.str();\n\t}\n};\n\n/**\n * Return inclusive ranges !!!\n */\nclass WorkspaceMetainfo : private NonCopyable {\npublic:\n\tWorkspaceMetainfo(const Coord innerSize, const Coord boundaryWidth) {\n\t\tprecalculate(innerSize, boundaryWidth);\n\t}\n\n\tconst AreaCoords& working_workspace_area() const { return wwa; }\n\n\tconst AreaCoords& innies_space_area() const { return isa; };\n\n\t/**\n\t * That's how shared areas are divided:\n\t * ___________\n\t * | |_______| |\n\t * | | | |\n\t * | | | |\n\t * | |_______| |\n\t * |_|_______|_|\n\t */\n\tconst std::array<AreaCoords, 4>& shared_areas() const { return sha; }\n\t\nprivate:\n\tAreaCoords wwa;\n\tAreaCoords isa;\n\tstd::array<AreaCoords, 4> sha;\n\t\n\tvoid precalculate(const Coord innerSize, const Coord boundaryWidth) {\n\t\tconst auto lid = innerSize-1;\n\t\t\n\t\twwa.bottomLeft.x = 0;\n\t\twwa.bottomLeft.y = 0;\n\t\twwa.upperRight.x = lid;\n\t\twwa.upperRight.y = lid;\n\n\t\tisa.bottomLeft.x = boundaryWidth;\n\t\tisa.bottomLeft.y = boundaryWidth;\n\t\tisa.upperRight.x = lid - boundaryWidth;\n\t\tisa.upperRight.y = lid - boundaryWidth;\n\t\t\n\t\tsha = {\n\t\t\tAreaCoords(CSet(0, 0), CSet(boundaryWidth-1, lid)), // left\n\t\t\tAreaCoords(CSet(innerSize - boundaryWidth, 0), CSet(lid, lid)), // right\n\t\t\tAreaCoords(CSet(boundaryWidth, innerSize-boundaryWidth), CSet(lid-boundaryWidth, lid)), // top\n\t\t\tAreaCoords(CSet(boundaryWidth, 0), CSet(lid-boundaryWidth, boundaryWidth-1)), // bottom\n\t\t};\n\t}\n};\n\nvoid test_wmi() {\n\tWorkspaceMetainfo wmi(9, 2);\n\n\tauto work_area = wmi.working_workspace_area();\n\tauto innie = wmi.innies_space_area();\n\tauto in_bound = wmi.shared_areas();\n\n\t#define STR(X) std::cerr << X.toStr() << std::endl;\n\n\tassert(work_area.bottomLeft == CSet(0,0));\n\tassert(work_area.upperRight == CSet(8,8));\n\n\n\tassert(innie.bottomLeft == CSet(2,2));\n\tassert(innie.upperRight == CSet(6,6));\n\n\t// left\n\tassert(in_bound[0].bottomLeft == CSet(0,0));\n\tassert(in_bound[0].upperRight == CSet(1,8));\n\t// right\n\tassert(in_bound[1].bottomLeft == CSet(7,0));\n\tassert(in_bound[1].upperRight == CSet(8,8));\n\t// top\n\tassert(in_bound[2].bottomLeft == CSet(2,7));\n\tassert(in_bound[2].upperRight == CSet(6,8));\n\t// bottom\n\tassert(in_bound[3].bottomLeft == 
CSet(2,0));\n\tassert(in_bound[3].upperRight == CSet(6,1));\n\n\t#undef STR\n}\n\nvoid iterate_over_area(AreaCoords area, std::function<void(const Coord, const Coord)> f) {\n\tfor(Coord x_idx = area.bottomLeft.x; x_idx <= area.upperRight.x; x_idx++) {\n\t\tfor(Coord y_idx = area.bottomLeft.y; y_idx <= area.upperRight.y; y_idx++) {\n\t\t\tf(x_idx, y_idx);\n\t\t}\n\t}\n}\n\nclass Workspace : private NonCopyable {\npublic:\n\tWorkspace(const Coord innerSize, const Coord borderWidth, ClusterManager& cm, Comms& comm)\n\t\t\t: innerSize(innerSize), cm(cm), comm(comm), borderWidth(borderWidth)\n\t{\n\t\touterSize = innerSize+2*borderWidth;\n\t\tmemorySize = outerSize*outerSize;\n\n\t\tneigh = cm.getNeighbours();\n\t\tinitialize_buffers();\n\n\t\tcomm_proxy = new NeighboursCommProxy(neigh, innerSize, borderWidth, [this](auto x, auto y) {\n\t\t\treturn this->get_offset(x,y);\n\t\t});\n\t}\n\n\t~Workspace() {\n\t\tdelete comm_proxy;\n\t\tfreeBuffers();\n\t}\n\n\tvoid set_elf(const Coord x, const Coord y, const NumType value) {\n\t\t*elAddress(x, y, front) = value;\n\t}\n\n\tNumType elb(const Coord x, const Coord y) {\n\t\treturn *elAddress(x,y,back);\n\t}\n\n\tCoord getInnerLength() {return innerSize;}\n\n\t/*\n\t * All 4 functions are called before swap() is invoked!\n\t * 2 first before outie calculations, last two after them\n\t */\n\n\tvoid ensure_out_boundary_arrived() {\n\t\tcomm.wait_for_receives();\n\t}\n\n\tvoid ensure_in_boundary_sent() {\n\t\tcomm.wait_for_send();\n\t}\n\n\tvoid send_in_boundary() {\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(neigh[i] != N_INVALID) {\n\t\t\t\tcomm_proxy->schedule_send(comm, static_cast<Neighbour>(i), front);\n\t\t\t}\n\t\t}\n\t}\n\n\tvoid start_wait_for_new_out_border() {\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(neigh[i] != N_INVALID) {\n\t\t\t\tcomm_proxy->schedule_recv(comm, static_cast<Neighbour>(i), front);\n\t\t\t}\n\t\t}\n\t}\n\n\tvoid swap() {\n\t\tswapBuffers();\n\t}\n\n\tvoid memory_dump(bool dump_front) {\n\t\tauto* buffer = dump_front ? 
front : back;\n\n\t\tfor(Coord i = 0; i < outerSize; i++) {\n\n\t\t\tfor(Coord j = 0; j < outerSize; j++) {\n\t\t\t\tstd::cerr << std::fixed << std::setprecision(2) << buffer[i*outerSize+j] << \" \";\n\t\t\t}\n\n\t\t\tstd::cerr << std::endl;\n\t\t}\n\t}\n\nprivate:\n\tClusterManager& cm;\n\tComms& comm;\n\tint* neigh;\n\tNeighboursCommProxy* comm_proxy;\n\n\tconst Coord innerSize;\n\tCoord outerSize;\n\tCoord memorySize;\n\n\tconst Coord borderWidth;\n\n\tNumType *front;\n\tNumType *back;\n\n\tvoid initialize_buffers() {\n\t\tfront = new NumType[memorySize];\n\t\tback = new NumType[memorySize];\n\n\t\tfor(Coord i = 0; i < memorySize; i++) {\n\t\t\tfront[i] = 0.0;\n\t\t\tback[i] = 0.0;\n\t\t}\n\t}\n\n\tvoid freeBuffers() {\n\t\tdelete[] front;\n\t\tdelete[] back;\n\t}\n\n\tNumType* elAddress(const Coord x, const Coord y, NumType* base) {\n\t\treturn base + get_offset(x,y);\n\t}\n\n\t/*\n\t * Because MPI reads (and writes) directy from front/back, memory layout is no longer arbitrary\n\t * I decided to store coordinate system in horizontally mirrored manner:\n\t * x\n\t * (0,0) -------------->\n\t * |\n\t * |\n\t * y |\n\t * |\n\t * |\n\t * |\n\t *\n\t * x corresponds to j, y corresponds to i\n\t * stored in row major manner ( adr = i*width + j = y*width + x )\n\t *\n\t */\n\tCoord get_offset(const Coord x, const Coord y) {\n\t\treturn outerSize*(borderWidth + y) + (borderWidth + x);\n\t}\n\n\tvoid swapBuffers() {\n\t\tNumType* tmp = front;\n\t\tfront = back;\n\t\tback = tmp;\n\t}\n};\n\nstd::string filenameGenerator(int nodeId) {\n\tstd::ostringstream oss;\n\toss << \"./results/\" << nodeId << \"_t\";\n\treturn oss.str();\n}\n\nconst Coord BOUNDARY_WIDTH = 1;\n\nint main(int argc, char **argv) {\n\tstd::cerr << __FILE__ << std::endl;\n\n\tauto conf = parse_cli(argc, argv);\n\n\tClusterManager cm(conf.N);\n\tauto n_slice = cm.getPartitioner().get_n_slice();\n\tNumType x_offset, y_offset;\n\tstd::tie(x_offset, y_offset) = cm.getOffsets();\n\tauto h = cm.getPartitioner().get_h();\n\n\tComms comm;\n\tWorkspace w(n_slice, BOUNDARY_WIDTH, cm, comm);\n\tWorkspaceMetainfo wi(n_slice, BOUNDARY_WIDTH);\n\n\tFileDumper<Workspace> d(filenameGenerator(cm.getNodeId()),\n\t n_slice,\n\t x_offset,\n\t y_offset,\n\t h,\n\t get_freq_sel(conf.timeSteps));\n\n\tTimer timer;\n\n\tMPI_Barrier(cm.getComm());\n\ttimer.start();\n\n\tauto ww_area = wi.working_workspace_area();\n\tauto wi_area = wi.innies_space_area();\n\tauto ws_area = wi.shared_areas();\n\n\tDL( \"filling boundary condition\" )\n\n\titerate_over_area(ww_area, [&w, x_offset, y_offset, h](const Coord x_idx, const Coord y_idx) {\n\t\tauto x = x_offset + x_idx*h;\n\t\tauto y = y_offset + y_idx*h;\n\t\tauto val = f(x,y);\n\t\tw.set_elf(x_idx,y_idx, val);\n\n\t\t/*\n\t\tstd::cerr << \"[\" << x_idx << \",\" << y_idx <<\"] \"\n\t\t\t << \"(\" << x << \",\" << y << \") -> \"\n\t\t\t << val << std::endl;\n */\n\t});\n\n\tDBG_ONLY( w.memory_dump(true) )\n\n\tDL( \"calculated boundary condition, initial communication\" )\n\n\t/* send our part of initial condition to neighbours */\n\tw.send_in_boundary();\n\tw.start_wait_for_new_out_border();\n\n\tDL( \"initial swap\" )\n\tw.swap();\n\n\tDL( \"initial communication done\" )\n\n\tauto eq_f = [&w](const Coord x_idx, const Coord y_idx) {\n\t\t// std::cerr << \"Entering Y loop, x y \" << y_idx << std::endl;\n\n\t\tauto eq_val = equation(\n\t\t\t\tw.elb(x_idx - 1, y_idx),\n\t\t\t\tw.elb(x_idx, y_idx - 1),\n\t\t\t\tw.elb(x_idx + 1, y_idx),\n\t\t\t\tw.elb(x_idx, y_idx + 1)\n\t\t);\n\n\t\tw.set_elf(x_idx, y_idx, 
eq_val);\n\t};\n\n\tfor(TimeStepCount ts = 0; ts < conf.timeSteps; ts++) {\n\t\tDL( \"Entering timestep loop, ts = \" << ts )\n\n\t\tDL( \"front dump - before innies calculated\" )\n\t\tDBG_ONLY( w.memory_dump(true) )\n\t\tDL (\"back dump - before innies calculated\")\n\t\tDBG_ONLY( w.memory_dump(false) )\n\n\t\titerate_over_area(wi_area, eq_f);\n\t\tDL( \"Innies iterated, ts = \" << ts )\n\n\t\tw.ensure_out_boundary_arrived();\n\t\tDL( \"Out boundary arrived, ts = \" << ts )\n\t\tw.ensure_in_boundary_sent();\n\t\tDL( \"In boundary sent, ts = \" << ts )\n\n\t\tDL( \"front dump - innies calculated\" )\n\t\tDBG_ONLY( w.memory_dump(true) )\n\t\tDL (\"back dump - innies calculated\")\n\t\tDBG_ONLY( w.memory_dump(false) )\n\n\t\tfor(auto a: ws_area) {\n\t\t\titerate_over_area(a, eq_f);\n\t\t}\n\n\t\tDL( \"Outies iterated, ts = \" << ts )\n\n\t\tDL( \"front dump - outies calculated\" )\n\t\tDBG_ONLY( w.memory_dump(true) )\n\t\tDL (\"back dump - outies calculated\")\n\t\tDBG_ONLY( w.memory_dump(false) )\n\n\t\tw.send_in_boundary();\n\t\tDL( \"In boundary send scheduled, ts = \" << ts )\n\t\tw.start_wait_for_new_out_border();\n\n\t\tDL( \"Entering file dump\" )\n\t\tif (unlikely(conf.outputEnabled)) {\n\t\t\td.dumpBackbuffer(w, ts);\n\t\t}\n\n\t\tDL( \"Before swap, ts = \" << ts )\n\t\tw.swap();\n\t\tDL( \"After swap, ts = \" << ts )\n\t}\n\n\tMPI_Barrier(cm.getComm());\n\tauto duration = timer.stop();\n\n\tif(cm.getNodeId() == 0) {\n\t\tprint_result(\"parallel_gap\", cm.getNodeCount(), duration, conf);\n\t\tstd::cerr << ((double)duration)/1000000000 << \" s\" << std::endl;\n\t}\n\n\tDL( \"Terminating\" )\n\n\treturn 0;\n}\n" }, { "alpha_fraction": 0.48571428656578064, "alphanum_fraction": 0.5821428298950195, "avg_line_length": 14.61111068725586, "blob_id": "574622e7078784293d4058f4f463ceb98e0ab122", "content_id": "4410e9c8f101633cc4c19b6572b977337e62910a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 280, "license_type": "no_license", "max_line_length": 52, "num_lines": 18, "path": "/superc/pth/tests/t1_executor.py", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\nfrom common import *\n\n# (n, ts)\nworkloads = [\n (7200, 25),\n (3600, 100),\n (1800, 400),\n (900, 1600),\n]\n\ncmd_list = []\nfor n, ts in workloads:\n cmds = build_cmd_sequence(parallel_algos, n, ts)\n cmd_list = cmd_list + cmds\n\nrun_commands(cmd_list)" }, { "alpha_fraction": 0.6583333611488342, "alphanum_fraction": 0.7083333134651184, "avg_line_length": 19.16666603088379, "blob_id": "ee479c3d40184a39e05fd19aaead65a20154fc5e", "content_id": "f4e6cce4b717c920ad2dd511aef2d8ee03c1b1a1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 120, "license_type": "no_license", "max_line_length": 60, "num_lines": 6, "path": "/superc/pth/tests/t0_executor.py", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\nfrom common import *\n\ncmds = build_cmd_sequence(parallel_algos, n = 7200, ts = 25)\nrun_commands(cmds)" }, { "alpha_fraction": 0.5915833115577698, "alphanum_fraction": 0.6036564111709595, "avg_line_length": 20.796993255615234, "blob_id": "a377708f792dcbfedf7e6b35d5f419480265ec27", "content_id": "3e732b911bca16effac74e9b4ffc2f76b9ad638a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2899, "license_type": "no_license", "max_line_length": 90, "num_lines": 
133, "path": "/src/seq.cpp", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "\n#include <time.h>\n#include <cstddef>\n#include <string>\n#include \"shared.h\"\n\n/**\n * Work area is indexed from 0 to size-1\n * Around that we have border area, which can be accessed using -1 and size indices\n *\n * When plotting results, border counts as 0, but plotter doesn't plot border values\n * (easier to implement that way, same plotter can be used for seq and parallel)\n */\nclass Workspace {\npublic:\n\tWorkspace(const Coord size) :\n\t\t\tinnerLength(size),\n\t\t\touterLength(size+2),\n\t\t\tactualSize(outerLength*outerLength),\n\t\t\tzeroOffset(1)\n\t{\n\t\tfront = new NumType[actualSize];\n\t\tback = new NumType[actualSize];\n\t}\n\n\t~Workspace() {\n\t\tdelete[] front;\n\t\tdelete[] back;\n\t}\n\n\t/**\n\t * @return reference to (i,j)th front buffer element\n\t */\n\tinline NumType& elf(const Coord x, const Coord y) {\n\t\treturn front[coords(x,y)];\n\t}\n\n\t/**\n\t * @return reference to (i,j)th back buffer element\n\t */\n\tinline NumType& elb(const Coord x, const Coord y) {\n\t\treturn back[coords(x,y)];\n\t}\n\n\tvoid zeroBuffers(const NumType b) {\n\t\tfor(Coord i = 0; i < actualSize; i++) {\n\t\t\tfront[i] = b;\n\t\t\tback[i] = b;\n\t\t}\n\t}\n\n\tCoord getInnerLength() {return innerLength;}\n\n\tvoid swap() {\n\t\tNumType* tmp = front;\n\t\tfront = back;\n\t\tback = tmp;\n\t}\n\nprivate:\n\tconst Coord zeroOffset;\n\tconst Coord innerLength;\n\tconst Coord outerLength;\n\tconst Coord actualSize;\n\tNumType *front;\n\tNumType *back;\n\n\tinline Coord coords(const Coord x, const Coord y) {\n\t\treturn outerLength*(x+zeroOffset)+(y+zeroOffset);\n\t}\n};\n\n\nint main(int argc, char **argv) {\n\tstd::cerr << __FILE__ << std::endl;\n\n\tauto conf = parse_cli(argc, argv);\n\n\tPartitioner p(1, 0.0, 1.0, conf.N);\n\n\t/* calculate helper values */\n\tconst NumType h = p.get_h();\n\tconst Coord n = p.partition_inner_size();\n\n\tTimer timer;\n\tWorkspace w(conf.N);\n\tNumType x_off, y_off;\n\tstd::tie(x_off, y_off) = p.get_math_offset_node(0,0);\n\n\tFileDumper<Workspace> d(\"./results/t\", n, x_off, y_off, h, get_freq_sel(conf.timeSteps));\n\n\ttimer.start();\n\t/* fill in boundary condition */\n\tfor(Coord x_idx = 0; x_idx < n; x_idx++) {\n\t\tfor(Coord y_idx = 0; y_idx < n; y_idx++) {\n\t\t\tauto x = x_idx*h;\n\t\t\tauto y = y_idx*h;\n\t\t\tauto val = f(x,y);\n\t\t\tw.elf(x_idx, y_idx) = val;\n\n\t\t\t#ifdef DEBUG\n\t\t\tstd::cerr << \"[\" << x_idx << \",\" << y_idx <<\"] \"\n\t\t\t << \"(\" << x << \",\" << y << \") -> \"\n\t\t\t << val << std::endl;\n\t\t\t#endif\n\t\t}\n\t}\n\n\tw.swap();\n\n\tfor(TimeStepCount step = 0; step < conf.timeSteps; step++) {\n\t\tfor(Coord x_idx = 0; x_idx < n; x_idx++) {\n\t\t\tfor(Coord y_idx = 0; y_idx < n; y_idx++) {\n\t\t\t\tw.elf(x_idx, y_idx) = equation(\n\t\t\t\t\t w.elb(x_idx-1 ,y_idx),\n\t\t\t\t\t w.elb(x_idx ,y_idx-1),\n\t\t\t\t\t w.elb(x_idx+1 ,y_idx),\n\t\t\t\t\t w.elb(x_idx ,y_idx+1)\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\tw.swap();\n\t\tif (unlikely(conf.outputEnabled)) {\n\t\t\td.dumpBackbuffer(w, step);\n\t\t}\n\t}\n\n\tauto duration = timer.stop();\n\tprint_result(\"seq\", 1, duration, conf);\n\tstd::cerr << ((double)duration)/1000000000 << \" s\" << std::endl;\n\n\treturn 0;\n}" }, { "alpha_fraction": 0.5431956052780151, "alphanum_fraction": 0.5501300096511841, "avg_line_length": 27.360654830932617, "blob_id": "a4e2eabab9cf78256bee71c59e1dd63e46e3bf13", "content_id": 
"33758b578bdc63fed19a865cb8d7b76d6b6c2700", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3461, "license_type": "no_license", "max_line_length": 113, "num_lines": 122, "path": "/superc/pth/tests/common.py", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "\nimport os\nimport sys\n\nbase_dir = \"/net/people/plgblueeyedhush/ar-lab1/\"\nscripts_dir = base_dir + \"superc/pth/tests/\"\nbuild_dir = base_dir + \"cmake-build-release/\"\nresults_dir = base_dir + \"results/\"\nlogs_dir = base_dir + \"logs/\"\nmpiexec_prefix = \"mpiexec \" #\"mpiexec -ordered-output -prepend-rank \"\n\n\nparallel_algos = map(lambda postfix: \"parallel{}\".format(postfix), [\"\", \"_async\", \"_gap\", \"_lb\", \"_ts\"])\n\n# -------------------\n# Environment agnostic\n# -------------------\n\ndef ensure_dir_exists(dir):\n os.system(\"mkdir -p \" + dir)\n\ndef err(msg):\n sys.stderr.write(msg + \"\\n\")\n\n# -------------------\n# Meant for scheduler\n# -------------------\n\ndef run_batch_string(nodes,\n script,\n tasks_per_node = 1,\n mem_per_task = \"1gb\",\n queue=\"plgrid-short\",\n log_prefix=\"ar\",\n time=\"00:20:00\",\n repetition_no=3,\n results_dir=\"./\"):\n process_count = nodes * tasks_per_node\n cmd = (\"sbatch\"\n \" -J ar-1\"\n \" -N \" + str(nodes) +\n \" --ntasks-per-node \" + str(tasks_per_node) +\n \" --mem-per-cpu \" + mem_per_task +\n \" --time \" + time +\n \" -A ccbmc6\"\n \" -p \" + queue +\n \" --output \" + log_prefix + \".so\"\n \" --error \" + log_prefix + \".se\"\n \" --mail-type=END,FAIL\"\n \" [email protected] \" + script + \" {} {} {}\").format(process_count, repetition_no, results_dir)\n\n print cmd\n return cmd\n\ndef insert_separator_into_result_files(result_dir):\n separator = '\\\\n~~~~ `date \"+%H:%M:%S %d-%m-%Y\"` ~~~~\\\\n'\n cmd = \"for f in {}*; do echo -e \\\"{}\\\" >> $f; done\".format(result_dir, separator)\n os.system(cmd)\n\ndef prepare_log_and_result_dirs_for_test(test_id):\n log_dir = logs_dir + \"t{}/\".format(test_id)\n ensure_dir_exists(log_dir)\n\n rdir = results_dir + \"/t{}/\".format(test_id)\n ensure_dir_exists(rdir)\n insert_separator_into_result_files(rdir)\n\n return log_dir, rdir\n\n# -------------------\n# Meant for executor\n# -------------------\n\ndef import_modules_string():\n return (\n \"module load tools/impi/2018;\"\n \"module load plgrid/tools/cmake/3.7.2;\"\n )\n\ndef algo_cli(name, time_steps, grid_size, result_file = \"\", output=False):\n if name == 'seq':\n prefix = \"\"\n else:\n prefix = mpiexec_prefix\n\n base = prefix + \"{}/{} -t {} -n {} {}\".format(build_dir, name, time_steps, grid_size, \"-o\" if output else \"\")\n cmd = (base + \" >> \" + result_file) if result_file else base\n return cmd\n\ndef run_commands(cmds):\n cmd = \"; \".join(cmds)\n err(cmd)\n cmd = import_modules_string() + \" \" + cmd\n os.system(cmd)\n\ndef get_process_num():\n return int(sys.argv[1])\n\ndef get_repetition_no():\n return int(sys.argv[2])\n\ndef get_results_dir():\n return sys.argv[3]\n\ndef get_node_id():\n return os.environ[\"SLURM_PROCID\"]\n\n# makes decision whether we should include seq in the list\ndef get_algorithm_list(parallels):\n return [\"seq\"] + parallels if get_process_num() == 1 else parallels\n\ndef get_result_path(ts, n):\n return get_results_dir() + \"{}_{}_{}_{}\".format(ts, n, get_node_id(), get_process_num())\n\ndef build_cmd_sequence(parallels, n, ts):\n result_path = get_result_path(ts, n)\n\n full_cli = []\n for algo_name in 
get_algorithm_list(parallels):\n for i in range(0, get_repetition_no()):\n full_cli.append(algo_cli(algo_name, ts, n, result_path))\n\n return full_cli\n" }, { "alpha_fraction": 0.5625, "alphanum_fraction": 0.6151315569877625, "avg_line_length": 19.33333396911621, "blob_id": "6d8a0b528dc6a9019a7ba7420e4c8720608a9979", "content_id": "5100977fc46a8e05394e256009a8552b3629ea4e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 304, "license_type": "no_license", "max_line_length": 64, "num_lines": 15, "path": "/superc/pth/run.sh", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nmodule load tools/impi/2018\nmodule load plgrid/tools/cmake/3.7.2\n\npushd \"$HOME\"/ar-lab1/cmake-build-release > /dev/null\n\nrm -f ./results/*\n\nif [ \"$1\" == \"seq\" ]; then\n ./seq -t $2 -n $3 $4\nelse\n mpiexec -ordered-output -prepend-rank ./$1 -o -t $2 -n $3 $4\nfi\npopd > /dev/null" }, { "alpha_fraction": 0.5983219742774963, "alphanum_fraction": 0.6062926054000854, "avg_line_length": 22.482759475708008, "blob_id": "b1a44fa26f0a5c40215a01ea4384f3e4eb091173", "content_id": "dd8526373b52b92d59fe36f1e59a935b2ac1a44e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 9535, "license_type": "no_license", "max_line_length": 109, "num_lines": 406, "path": "/src/parallel_lb.cpp", "repo_name": "KrzysztofNawara/ar-parallel-solver", "src_encoding": "UTF-8", "text": "\n#include <mpi.h>\n#include <exception>\n#include <iostream>\n#include <cmath>\n#include <cstring>\n#include \"shared.h\"\n\nconst int N_INVALID = -1;\n\nenum Neighbour {\n\tLEFT = 0,\n\tTOP = 1,\n\tRIGHT = 2,\n\tBOTTOM = 3,\n};\n\nclass ClusterManager {\npublic:\n\tClusterManager(const Coord N) : bitBucket(0) {\n\t\tMPI_Init(nullptr, nullptr);\n\t\tMPI_Comm_rank(comm, &nodeId);\n\t\tMPI_Comm_size(comm, &nodeCount);\n\n\t\tpartitioner = new Partitioner(nodeCount, 0.0, 1.0, N);\n\t\tsideLen = partitioner->get_nodes_grid_dimm();\n\t\tstd::tie(row, column) = partitioner->node_id_to_grid_pos(nodeId);\n\n\t\tinitNeighbours();\n\n\t\terr_log() << \"Cluster initialized successfully. 
I'm (\" << row << \",\" << column << \")\" << std::endl;\n\t}\n\n\t~ClusterManager() {\n\t\tdelete partitioner;\n\t\tMPI_Finalize();\n\t}\n\n\tPartitioner& getPartitioner() {return *partitioner;}\n\n\tint getNodeCount() { return nodeCount; }\n\tint getNodeId() { return nodeId; }\n\tstd::pair<NumType, NumType> getOffsets() { return partitioner->get_math_offset_node(row, column); };\n\tMPI_Comm getComm() { return comm; }\n\n\tstd::ostream& err_log() {\n\t\tstd::cerr << \"[\" << nodeId << \"] \";\n\t\treturn std::cerr;\n\t}\n\n\tstd::ostream& master_err_log() {\n\t\tif(nodeId == 0) {\n\t\t\tstd::cerr << \"[\" << nodeId << \"] \";\n\t\t\treturn std::cerr;\n\t\t} else {\n\t\t\treturn bitBucket;\n\t\t}\n\t}\n\n\tint* getNeighbours() {\n\t\treturn &neighbours[0];\n\t}\n\n\nprivate:\n\tconst static auto comm = MPI_COMM_WORLD;\n\n\tint nodeId;\n\tint nodeCount;\n\tint row;\n\tint column;\n\n\tPartitioner *partitioner;\n\n\tint sideLen;\n\tint neighbours[4];\n\n\tstd::ostream bitBucket;\n\n\tvoid initNeighbours() {\n\t\tif(row == 0) { neighbours[Neighbour::BOTTOM] = N_INVALID; }\n\t\telse { neighbours[Neighbour::BOTTOM] = nodeId-sideLen; }\n\n\t\tif(row == sideLen-1) { neighbours[Neighbour::TOP] = N_INVALID; }\n\t\telse { neighbours[Neighbour::TOP] = nodeId+sideLen; }\n\n\t\tif(column == 0) { neighbours[Neighbour::LEFT] = N_INVALID; }\n\t\telse { neighbours[Neighbour::LEFT] = nodeId-1; }\n\n\t\tif(column == sideLen-1) { neighbours[Neighbour::RIGHT] = N_INVALID; }\n\t\telse { neighbours[Neighbour::RIGHT] = nodeId+1; }\n\n\t\terr_log() << \"Neighbours: \"\n\t\t << \" LEFT: \" << neighbours[LEFT]\n\t\t << \" TOP: \" << neighbours[TOP]\n\t\t << \" RIGHT: \" << neighbours[RIGHT]\n\t\t << \" BOTTOM: \" << neighbours[BOTTOM] << std::endl;\n\t}\n};\n\n/*\n * Buffer exposal during async\n * I - start of innies calculation\n * O - start of outies calulations\n * s - swap, calculations finished for given iteration\n * out_r - recv, period of outer buffers exposal to the network\n * out_s - send, period of inner buffers exposal to the network\n *\n * I O s I O s\n * - out_r -| |-- out_r -|\n * - out_s -| |-- out_s -|\n *\n * receive (outer) - needed when calculating border values\n *\t* must be present when i-1 outies calculated\n *\t* can lie idle during subsequent outies calculation (assuming no memcpy)\n * send (inner)\n *\t* can be sent only when values calculated (happens right after outer become available)\n *\t* can be exposed only until outies from next iteration need to be calculated\n *\n * Memcpy impact?\n * Separate inner buffer: we don't have to wait with i+1 outies calculation until buffers are free (otherwise\n * we could overwrite data being sent)\n * Separate outer buffer: data required to carry out computations, but we can have a couple of spares with\n * outstanding receive request attached\n *\n * Single memcpied send buffer:\n * Allow to extend buffer exposure into outies calculation phase\n *\n * I O s I O s\n * - out_r -| |-- out_r -|\n * --out_m--|xxxx| memcpy-> out_s1\n * - out_s1 -----| |------------|\n *\n */\n\nclass Comms {\npublic:\n\tComms(const Coord innerLength) : innerLength(innerLength) {\n\t\treset();\n\t}\n\n\tvoid exchange(int targetId, NumType* sendBuffer, NumType* receiveBuffer) {\n\t\tMPI_Isend(sendBuffer, innerLength, NUM_MPI_DT, targetId, 1, MPI_COMM_WORLD, rq + nextId);\n\t\tMPI_Irecv(receiveBuffer, innerLength, NUM_MPI_DT, targetId, MPI_ANY_TAG, MPI_COMM_WORLD, rq + nextId + 1);\n\n\t\tnextId += 2;\n\t}\n\n\tvoid wait() {\n\t\tDL( \"NextId: \" << nextId 
)\n\t\tfor(int i = 0; i < nextId; i++) {\n\t\t\tint finished;\n\t\t\tMPI_Waitany(nextId, rq, &finished, MPI_STATUSES_IGNORE);\n\t\t\tDL( \"Finished \" << finished << \". Already done \" << i+1 )\n\t\t}\n\t\tDL( \"Wait finished\" )\n\t}\n\n\tvoid reset() {\n\t\tfor(int i = 0; i < RQ_COUNT; i++) {\n\t\t\trq[i] = MPI_REQUEST_NULL;\n\t\t}\n\t\tnextId = 0;\n\t}\n\nprivate:\n\tconst static int RQ_COUNT = 8;\n\tconst Coord innerLength;\n\tMPI_Request rq[RQ_COUNT];\n\tint nextId;\n};\n\n\nclass Workspace {\npublic:\n\tWorkspace(const Coord innerSize, const Coord borderWidth, ClusterManager& cm, Comms& comm)\n\t\t\t: innerSize(innerSize), cm(cm), comm(comm), borderWidth(borderWidth)\n\t{\n\t\touterSize = innerSize+2*borderWidth;\n\t\tmemorySize = outerSize*outerSize;\n\n\t\tneigh = cm.getNeighbours();\n\t\tinitialize_buffers();\n\t}\n\n\t~Workspace() {\n\t\tfreeBuffers();\n\t}\n\n\tvoid set_elf(const Coord x, const Coord y, const NumType value) {\n\t\t*elAddress(x, y, front) = value;\n\t}\n\n\tNumType elb(const Coord x, const Coord y) {\n\t\treturn *elAddress(x,y,back);\n\t}\n\n\tCoord getInnerLength() {return innerSize;}\n\n\tvoid swap(bool comms = true) {\n\t\tif(comms) {\n\t\t\tcopyInnerEdgesToBuffers();\n\n\t\t\tcomm.reset();\n\t\t\tfor(int i = 0; i < 4; i++) {\n\t\t\t\tauto iThNeigh = neigh[i];\n\t\t\t\tif(iThNeigh != N_INVALID) {\n\t\t\t\t\tcomm.exchange(iThNeigh, innerEdge[i], outerEdge[i]);\n\t\t\t\t}\n\t\t\t}\n\t\t\tcomm.wait();\n\n\t\t\tcopy_outer_buffer_to(front);\n\t\t}\n\n\t\tswapBuffers();\n\t}\n\nprivate:\n\tClusterManager& cm;\n\tComms& comm;\n\tint* neigh;\n\n\tconst Coord innerSize;\n\tCoord outerSize;\n\tCoord memorySize;\n\n\tconst Coord borderWidth;\n\n\t/* horizontal could be stored with main buffer, but for convenience both horizontals and\n\t * verticals are allocated separatelly (and writes mirrored) */\n\tNumType* innerEdge[4];\n\t/* all outer edges are allocated separatelly; their length is innerLength, not innerLength + 2 */\n\tNumType* outerEdge[4];\n\tNumType *front;\n\tNumType *back;\n\n\tvoid initialize_buffers() {\n\t\tfront = new NumType[memorySize];\n\t\tback = new NumType[memorySize];\n\n\t\tfor(Coord i = 0; i < memorySize; i++) {\n\t\t\tfront[i] = 0.0;\n\t\t\tback[i] = 0.0;\n\t\t}\n\n\t\t/* create inner buffer (as comm buffers) for */\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(neigh[i] != N_INVALID) {\n\t\t\t\tinnerEdge[i] = new NumType[innerSize];\n\t\t\t\touterEdge[i] = new NumType[innerSize];\n\t\t\t} else {\n\t\t\t\tinnerEdge[i] = nullptr;\n\t\t\t\touterEdge[i] = nullptr;\n\t\t\t}\n\t\t}\n\t}\n\n\tvoid freeBuffers() {\n\t\tdelete[] front;\n\t\tdelete[] back;\n\n\t\tfor(int i = 0; i < 4; i++) {\n\t\t\tif(innerEdge != nullptr) {\n\t\t\t\tdelete[] innerEdge[i];\n\t\t\t\tdelete[] outerEdge[i];\n\t\t\t}\n\t\t}\n\t}\n\n\tNumType* elAddress(const Coord x, const Coord y, NumType* base) {\n\t\treturn base + outerSize*(borderWidth + x) + (borderWidth + y);\n\t}\n\n\tvoid swapBuffers() {\n\t\tNumType* tmp = front;\n\t\tfront = back;\n\t\tback = tmp;\n\t}\n\n\tvoid copyInnerEdgesToBuffers() {\n\t\t#define LOOP(EDGE, X, Y) \\\n\t\tif(neigh[EDGE] != N_INVALID) { \\\n\t\t\tfor(Coord i = 0; i < innerSize; i++) { \\\n\t\t\t\tinnerEdge[EDGE][i] = *elAddress(X,Y,front); \\\n\t\t\t} \\\n\t\t}\n\n\t\tLOOP(TOP, i, innerSize-1)\n\t\tLOOP(BOTTOM, i, 0)\n\t\tLOOP(LEFT, 0, i)\n\t\tLOOP(RIGHT, innerSize-1, i)\n\n\t\t#undef LOOP\n\t}\n\n\tvoid copy_outer_buffer_to(NumType *target) {\n\t\t#define LOOP(EDGE, X, Y) \\\n\t\tif(neigh[EDGE] != N_INVALID) { \\\n\t\t\tfor(Coord i = 0; i < 
innerSize; i++) { \\\n\t\t\t\t*elAddress(X,Y,target) = outerEdge[EDGE][i]; \\\n\t\t\t} \\\n\t\t}\n\n\t\tLOOP(TOP, i, innerSize)\n\t\tLOOP(BOTTOM, i, -1)\n\t\tLOOP(LEFT, -1, i)\n\t\tLOOP(RIGHT, innerSize, i)\n\n\t\t#undef LOOP\n\t}\n};\n\nstd::string filenameGenerator(int nodeId) {\n\tstd::ostringstream oss;\n\toss << \"./results/\" << nodeId << \"_t\";\n\treturn oss.str();\n}\n\nint main(int argc, char **argv) {\n\tstd::cerr << __FILE__ << std::endl;\n\n\tauto conf = parse_cli(argc, argv);\n\n\tClusterManager cm(conf.N);\n\tauto n_slice = cm.getPartitioner().get_n_slice();\n\tNumType x_offset, y_offset;\n\tstd::tie(x_offset, y_offset) = cm.getOffsets();\n\tauto h = cm.getPartitioner().get_h();\n\n\tComms comm(n_slice);\n\tWorkspace w(n_slice, 1, cm, comm);\n\n\tFileDumper<Workspace> d(filenameGenerator(cm.getNodeId()),\n\t n_slice,\n\t x_offset,\n\t y_offset,\n\t h,\n\t get_freq_sel(conf.timeSteps));\n\n\tTimer timer;\n\n\tMPI_Barrier(cm.getComm());\n\ttimer.start();\n\n\tfor(Coord x_idx = 0; x_idx < n_slice; x_idx++) {\n\t\tfor(Coord y_idx = 0; y_idx < n_slice; y_idx++) {\n\t\t\tauto x = x_offset + x_idx*h;\n\t\t\tauto y = y_offset + y_idx*h;\n\t\t\tauto val = f(x,y);\n\t\t\tw.set_elf(x_idx,y_idx, val);\n\n\t\t\t#ifdef DEBUG\n\t\t\tstd::cerr << \"[\" << x_idx << \",\" << y_idx <<\"] \"\n\t\t\t << \"(\" << x << \",\" << y << \") -> \"\n\t\t\t << val << std::endl;\n\t\t\t#endif\n\t\t}\n\t}\n\n\tw.swap();\n\n\tfor(TimeStepCount ts = 0; ts < conf.timeSteps; ts++) {\n\t\tDL( \"Entering timestep loop, ts = \" << ts )\n\n\t\tfor(Coord x_idx = 0; x_idx < n_slice; x_idx++) {\n\t\t\tDL( \"Entering X loop, x = \" << x_idx )\n\n\t\t\tfor(Coord y_idx = 0; y_idx < n_slice; y_idx++) {\n\t\t\t\tDL( \"Entering Y loop, x y \" << y_idx )\n\n\t\t\t\tauto eq_val = equation(\n\t\t\t\t\t\tw.elb(x_idx - 1, y_idx),\n\t\t\t\t\t\tw.elb(x_idx, y_idx - 1),\n\t\t\t\t\t\tw.elb(x_idx + 1, y_idx),\n\t\t\t\t\t\tw.elb(x_idx, y_idx + 1)\n\t\t\t\t);\n\n\t\t\t\tw.set_elf(x_idx, y_idx, eq_val);\n\t\t\t}\n\t\t}\n\n\t\tDL( \"Before swap, ts = \" << ts )\n\n\t\tw.swap();\n\n\t\tDL( \"Entering file dump\" )\n\n\t\tif (unlikely(conf.outputEnabled)) {\n\t\t\td.dumpBackbuffer(w, ts);\n\t\t}\n\n\t\tDL( \"After dump, ts = \" << ts )\n\t}\n\n\tMPI_Barrier(cm.getComm());\n\tauto duration = timer.stop();\n\n\tif(cm.getNodeId() == 0) {\n\t\tprint_result(\"parallel_lb\", cm.getNodeCount(), duration, conf);\n\t\tstd::cerr << ((double)duration)/1000000000 << \" s\" << std::endl;\n\t}\n\n\tDL( \"Terminating\" )\n\n\treturn 0;\n}\n" } ]
20
nate-ge/json-file-dataset
https://github.com/nate-ge/json-file-dataset
277031414151924f8e2f2d1179ef06bf838da1b0
6db0c3d3d348cbc72cd254c25ab5ac46ec1b6195
50fbbfd1d2ea8dab6abe9796f53b57dd474b7e18
refs/heads/main
2023-07-02T00:24:43.938204
2021-08-18T08:58:13
2021-08-18T08:58:13
389,479,300
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.48494982719421387, "alphanum_fraction": 0.5117056965827942, "avg_line_length": 18.066667556762695, "blob_id": "835748b557ddee1a317a709f4491d82808c2d529", "content_id": "157ef517126b5b562e31ab304f0364c3917fd3d8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "YAML", "length_bytes": 299, "license_type": "no_license", "max_line_length": 47, "num_lines": 15, "path": "/container builder/redis-attrs/docker-compose.yml", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "version: \"3\"\r\n\r\nservices:\r\n redis:\r\n image: redis:6.0.7\r\n container_name: redis_attrs\r\n ports:\r\n - 80:80\r\n volumes:\r\n - ./redis-command:/data/redis-command\r\n command:\r\n \"--loglevel debug\"\r\n logging:\r\n options:\r\n tag: \"{{.ImageName}}/{{.Name}}/{{.ID}}\"" }, { "alpha_fraction": 0.5609195232391357, "alphanum_fraction": 0.5770114660263062, "avg_line_length": 29.214284896850586, "blob_id": "edecb3785c8771ed01fa019c9836d73f8703bcba", "content_id": "28c89b99ce2d027e63e36209aeb9677e6fd6c532", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 435, "license_type": "no_license", "max_line_length": 74, "num_lines": 14, "path": "/container builder/ubuntu-attrs/OperationMaker.py", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "filename = \"operation-command\"\r\nwith open(filename, 'w') as file_object:\r\n num = 20\r\n for i in range(num):\r\n file_object.write(\"touch test{0}\\n\".format(i))\r\n\r\n for i in range(num):\r\n file_object.write(\"echo \\\"Hello world{0}\\\" > test{0}\\n\".format(i))\r\n\r\n for i in range(num):\r\n file_object.write(\"cat test{0}\\n\".format(i))\r\n\r\n for i in range(num):\r\n file_object.write(\"rm test{0}\\n\".format(i))" }, { "alpha_fraction": 0.40316206216812134, "alphanum_fraction": 0.4584980309009552, "avg_line_length": 17, "blob_id": "b312393dcc6aaeb108c9228f3465d7fead57a454", "content_id": "727a4f36923c5b85f168a3b484c48562fd574cc5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "YAML", "length_bytes": 253, "license_type": "no_license", "max_line_length": 47, "num_lines": 13, "path": "/container builder/mongo-attrs/docker-compose.yml", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "version: '3'\r\nservices:\r\n mongodb:\r\n image: mongo:4.4.0\r\n volumes:\r\n - ./test.sql:/test.sql\r\n ports:\r\n - 27019:27017\r\n command:\r\n \"-vvvvv\"\r\n logging:\r\n options:\r\n tag: \"{{.ImageName}}/{{.Name}}/{{.ID}}\"\r\n " }, { "alpha_fraction": 0.5270588397979736, "alphanum_fraction": 0.5458823442459106, "avg_line_length": 20.473684310913086, "blob_id": "58667ff2a7e0e38a88c130150e1a31b91c6e9d5b", "content_id": "2c1d6f69cada187a8495ad2fa598ba16e2a2f34c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "YAML", "length_bytes": 425, "license_type": "no_license", "max_line_length": 47, "num_lines": 19, "path": "/container builder/postgres-attrs/docker-compose.yml", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "version: \"3\"\r\n\r\nservices:\r\n postgres:\r\n image: postgres:12.4\r\n container_name: postgresql_attrs\r\n command:\r\n \"-c log_statement=all\"\r\n environment:\r\n POSTGRES_USER: postgres\r\n POSTGRES_PASSWORD: password\r\n ports:\r\n - 80:80\r\n volumes:\r\n - ./data:/var/lib/postgresql/data\r\n - ./test.sql:/test.sql\r\n logging:\r\n options:\r\n tag: 
\"{{.ImageName}}/{{.Name}}/{{.ID}}\"" }, { "alpha_fraction": 0.5808823704719543, "alphanum_fraction": 0.5970588326454163, "avg_line_length": 38.117645263671875, "blob_id": "da77fca152437f12328447abdfeb4d663738b626", "content_id": "6f5ebbf4b45ccf6e8e9476fdbccd50f81b89f48d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 680, "license_type": "no_license", "max_line_length": 89, "num_lines": 17, "path": "/container builder/mysql-attrs/sqlmaker.py", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "filename = \"test.sql\"\r\nwith open(filename, 'w') as file_object:\r\n file_object.write(\"CREATE TABLE test (id int primary key, num int);\\n\")\r\n num = 1000\r\n for i in range(num):\r\n file_object.write(\"INSERT INTO test VALUES ({0}, {1});\\n\".format(i, i * 2))\r\n file_object.write(\"SELECT * FROM test;\\n\")\r\n\r\n for i in range(num):\r\n file_object.write(\"UPDATE test SET num = {0} WHERE id = {1};\\n\".format(i * 3, i))\r\n file_object.write(\"SELECT * FROM test;\\n\")\r\n\r\n for i in range(num):\r\n file_object.write(\"DELETE FROM test WHERE id = {0};\\n\".format(i))\r\n file_object.write(\"SELECT * FROM test;\\n\")\r\n\r\n file_object.write(\"DROP TABLE test;\\n\")" }, { "alpha_fraction": 0.7125505805015564, "alphanum_fraction": 0.7287449240684509, "avg_line_length": 17.33333396911621, "blob_id": "ba47fc87ecddc3be599c80f189116ea7238d4c44", "content_id": "8fab15513cdd08f254a2001df530aaea7522c59c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 502, "license_type": "no_license", "max_line_length": 46, "num_lines": 27, "path": "/container builder/mysql/test.sql", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "CREATE DATABASE testdb;\nUSE testdb;\n\nCREATE TABLE test_tbl(\nid INT NOT NULL AUTO_INCREMENT,\ntitle VARCHAR(100) NOT NULL,\nauthor VARCHAR(40) NOT NULL,\nsubmission_date DATE,\nPRIMARY KEY ( id )\n)ENGINE=InnoDB DEFAULT CHARSET=utf8;\n\nINSERT INTO test_tbl\n(title, author, submission_date)\nVALUES\n(\"学习 PHP\", \"GS\", NOW());\n\nSELECT * FROM test_tbl;\n\nUPDATE test_tbl SET title='学习 C++' WHERE id=1;\n\nSELECT * FROM test_tbl;\n\nDELETE FROM test_tbl WHERE id=1;\n\nSELECT * FROM test_tbl;\n\nDROP DATABASE testdb;" }, { "alpha_fraction": 0.42168673872947693, "alphanum_fraction": 0.5060241222381592, "avg_line_length": 14.800000190734863, "blob_id": "caa047cb31ed7494f8047f0a0e492e6fb905da2e", "content_id": "6bfb22c7ef2f3d7c3f18524909639e4983946ce6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "YAML", "length_bytes": 166, "license_type": "no_license", "max_line_length": 28, "num_lines": 10, "path": "/container builder/mongo/docker-compose.yml", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "version: '3'\r\nservices:\r\n mongodb:\r\n image: mongo:4.4.0\r\n volumes:\r\n - ./test.sql:/test.sql\r\n ports:\r\n - 27018:27017\r\n command:\r\n \"-vvvvv\"" }, { "alpha_fraction": 0.6933333277702332, "alphanum_fraction": 0.7333333492279053, "avg_line_length": 24, "blob_id": "f8da38d41ddb093d72098344f46d64546f4ccd0b", "content_id": "c232f257a665ad611bb0a3e5378aa5635faa377f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 75, "license_type": "no_license", "max_line_length": 43, "num_lines": 3, "path": "/container builder/traefik-attrs/visit.py", "repo_name": "nate-ge/json-file-dataset", "src_encoding": 
"UTF-8", "text": "import os\nfor i in range(200):\n os.system(\"curl whoami.docker.localhost\")\n" }, { "alpha_fraction": 0.6153846383094788, "alphanum_fraction": 0.7076923251152039, "avg_line_length": 20.66666603088379, "blob_id": "f2de4871f5299c920e831447e4607da05be290bd", "content_id": "8015adf61d1155646cee9aac24e4011d1c7b2e20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 65, "license_type": "no_license", "max_line_length": 32, "num_lines": 3, "path": "/container builder/nginx-attrs/visit.py", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "import os\nfor i in range(2000):\n os.system(\"curl localhost:90\")\n" }, { "alpha_fraction": 0.5268292427062988, "alphanum_fraction": 0.5487805008888245, "avg_line_length": 27.428571701049805, "blob_id": "621af337aa37082807b0fd3afcb52d3d749823b3", "content_id": "1fb3d0fc25c3efc5af074444f78b8e650e506eb3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 410, "license_type": "no_license", "max_line_length": 59, "num_lines": 14, "path": "/container builder/redis-attrs/redismaker.py", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "filename = \"redis-command\"\r\nwith open(filename, 'w') as file_object:\r\n num = 20\r\n for i in range(num):\r\n file_object.write(\"SET {0} {1}\\n\".format(i, i * 2))\r\n\r\n for i in range(num):\r\n file_object.write(\"GET {0}\\n\".format(i))\r\n\r\n for i in range(num):\r\n file_object.write(\"EXISTS {0}\\n\".format(i * 2))\r\n\r\n for i in range(num):\r\n file_object.write(\"DEL {0}\\n\".format(i))" }, { "alpha_fraction": 0.5669013857841492, "alphanum_fraction": 0.5727699398994446, "avg_line_length": 42.94736862182617, "blob_id": "c0e970e690ffb67447a70dcbea53fe7cf7f5ca15", "content_id": "4346b4278d8467337d604363bbd2c475b1496cd1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 852, "license_type": "no_license", "max_line_length": 133, "num_lines": 19, "path": "/container builder/mongo/sqlmaker.py", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "filename = \"test.sql\"\r\nwith open(filename, 'w') as file_object:\r\n file_object.write(\"use testdb\\n\")\r\n file_object.write(\"db.createCollection(\\\"testColl\\\")\\n\")\r\n num = 10\r\n for i in range(num):\r\n file_object.write(\"db.testColl.insert({title: '\" + str(i) + \"',description: '\" + str(i*2) + \"', likes: \" + str(i*3) + \"})\\n\")\r\n file_object.write(\"db.testColl.find().pretty()\\n\")\r\n\r\n for i in range(num):\r\n file_object.write(\"db.testColl.update({'title':'\" + str(i) + \"'},{$set:{'title':'\" + str(i+1) + \"'}})\\n\")\r\n file_object.write(\"db.testColl.find().pretty()\\n\")\r\n\r\n for i in range(num):\r\n file_object.write(\"db.testColl.remove({'title':'\" + str(i) + \"'})\\n\")\r\n file_object.write(\"db.testColl.find().pretty()\\n\")\r\n\r\n file_object.write(\"db.testColl.drop()\\n\")\r\n file_object.write(\"db.dropDatabase()\\n\")" }, { "alpha_fraction": 0.5729111433029175, "alphanum_fraction": 0.7136382460594177, "avg_line_length": 36.22761917114258, "blob_id": "353b61f602c6a0313e4ae3baa38748ebd8d8a426", "content_id": "7b03eb8eef29c23244dfe05560139dfcf598429f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 111869, "license_type": "no_license", "max_line_length": 48, "num_lines": 3005, "path": "/container 
builder/postgres-attrs/test.sql", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "CREATE TABLE test (id int primary key, num int);\nINSERT INTO test VALUES (0, 0);\nINSERT INTO test VALUES (1, 2);\nINSERT INTO test VALUES (2, 4);\nINSERT INTO test VALUES (3, 6);\nINSERT INTO test VALUES (4, 8);\nINSERT INTO test VALUES (5, 10);\nINSERT INTO test VALUES (6, 12);\nINSERT INTO test VALUES (7, 14);\nINSERT INTO test VALUES (8, 16);\nINSERT INTO test VALUES (9, 18);\nINSERT INTO test VALUES (10, 20);\nINSERT INTO test VALUES (11, 22);\nINSERT INTO test VALUES (12, 24);\nINSERT INTO test VALUES (13, 26);\nINSERT INTO test VALUES (14, 28);\nINSERT INTO test VALUES (15, 30);\nINSERT INTO test VALUES (16, 32);\nINSERT INTO test VALUES (17, 34);\nINSERT INTO test VALUES (18, 36);\nINSERT INTO test VALUES (19, 38);\nINSERT INTO test VALUES (20, 40);\nINSERT INTO test VALUES (21, 42);\nINSERT INTO test VALUES (22, 44);\nINSERT INTO test VALUES (23, 46);\nINSERT INTO test VALUES (24, 48);\nINSERT INTO test VALUES (25, 50);\nINSERT INTO test VALUES (26, 52);\nINSERT INTO test VALUES (27, 54);\nINSERT INTO test VALUES (28, 56);\nINSERT INTO test VALUES (29, 58);\nINSERT INTO test VALUES (30, 60);\nINSERT INTO test VALUES (31, 62);\nINSERT INTO test VALUES (32, 64);\nINSERT INTO test VALUES (33, 66);\nINSERT INTO test VALUES (34, 68);\nINSERT INTO test VALUES (35, 70);\nINSERT INTO test VALUES (36, 72);\nINSERT INTO test VALUES (37, 74);\nINSERT INTO test VALUES (38, 76);\nINSERT INTO test VALUES (39, 78);\nINSERT INTO test VALUES (40, 80);\nINSERT INTO test VALUES (41, 82);\nINSERT INTO test VALUES (42, 84);\nINSERT INTO test VALUES (43, 86);\nINSERT INTO test VALUES (44, 88);\nINSERT INTO test VALUES (45, 90);\nINSERT INTO test VALUES (46, 92);\nINSERT INTO test VALUES (47, 94);\nINSERT INTO test VALUES (48, 96);\nINSERT INTO test VALUES (49, 98);\nINSERT INTO test VALUES (50, 100);\nINSERT INTO test VALUES (51, 102);\nINSERT INTO test VALUES (52, 104);\nINSERT INTO test VALUES (53, 106);\nINSERT INTO test VALUES (54, 108);\nINSERT INTO test VALUES (55, 110);\nINSERT INTO test VALUES (56, 112);\nINSERT INTO test VALUES (57, 114);\nINSERT INTO test VALUES (58, 116);\nINSERT INTO test VALUES (59, 118);\nINSERT INTO test VALUES (60, 120);\nINSERT INTO test VALUES (61, 122);\nINSERT INTO test VALUES (62, 124);\nINSERT INTO test VALUES (63, 126);\nINSERT INTO test VALUES (64, 128);\nINSERT INTO test VALUES (65, 130);\nINSERT INTO test VALUES (66, 132);\nINSERT INTO test VALUES (67, 134);\nINSERT INTO test VALUES (68, 136);\nINSERT INTO test VALUES (69, 138);\nINSERT INTO test VALUES (70, 140);\nINSERT INTO test VALUES (71, 142);\nINSERT INTO test VALUES (72, 144);\nINSERT INTO test VALUES (73, 146);\nINSERT INTO test VALUES (74, 148);\nINSERT INTO test VALUES (75, 150);\nINSERT INTO test VALUES (76, 152);\nINSERT INTO test VALUES (77, 154);\nINSERT INTO test VALUES (78, 156);\nINSERT INTO test VALUES (79, 158);\nINSERT INTO test VALUES (80, 160);\nINSERT INTO test VALUES (81, 162);\nINSERT INTO test VALUES (82, 164);\nINSERT INTO test VALUES (83, 166);\nINSERT INTO test VALUES (84, 168);\nINSERT INTO test VALUES (85, 170);\nINSERT INTO test VALUES (86, 172);\nINSERT INTO test VALUES (87, 174);\nINSERT INTO test VALUES (88, 176);\nINSERT INTO test VALUES (89, 178);\nINSERT INTO test VALUES (90, 180);\nINSERT INTO test VALUES (91, 182);\nINSERT INTO test VALUES (92, 184);\nINSERT INTO test VALUES (93, 186);\nINSERT INTO test VALUES (94, 188);\nINSERT INTO test VALUES (95, 
190);\nINSERT INTO test VALUES (96, 192);\nINSERT INTO test VALUES (97, 194);\nINSERT INTO test VALUES (98, 196);\nINSERT INTO test VALUES (99, 198);\nINSERT INTO test VALUES (100, 200);\nINSERT INTO test VALUES (101, 202);\nINSERT INTO test VALUES (102, 204);\nINSERT INTO test VALUES (103, 206);\nINSERT INTO test VALUES (104, 208);\nINSERT INTO test VALUES (105, 210);\nINSERT INTO test VALUES (106, 212);\nINSERT INTO test VALUES (107, 214);\nINSERT INTO test VALUES (108, 216);\nINSERT INTO test VALUES (109, 218);\nINSERT INTO test VALUES (110, 220);\nINSERT INTO test VALUES (111, 222);\nINSERT INTO test VALUES (112, 224);\nINSERT INTO test VALUES (113, 226);\nINSERT INTO test VALUES (114, 228);\nINSERT INTO test VALUES (115, 230);\nINSERT INTO test VALUES (116, 232);\nINSERT INTO test VALUES (117, 234);\nINSERT INTO test VALUES (118, 236);\nINSERT INTO test VALUES (119, 238);\nINSERT INTO test VALUES (120, 240);\nINSERT INTO test VALUES (121, 242);\nINSERT INTO test VALUES (122, 244);\nINSERT INTO test VALUES (123, 246);\nINSERT INTO test VALUES (124, 248);\nINSERT INTO test VALUES (125, 250);\nINSERT INTO test VALUES (126, 252);\nINSERT INTO test VALUES (127, 254);\nINSERT INTO test VALUES (128, 256);\nINSERT INTO test VALUES (129, 258);\nINSERT INTO test VALUES (130, 260);\nINSERT INTO test VALUES (131, 262);\nINSERT INTO test VALUES (132, 264);\nINSERT INTO test VALUES (133, 266);\nINSERT INTO test VALUES (134, 268);\nINSERT INTO test VALUES (135, 270);\nINSERT INTO test VALUES (136, 272);\nINSERT INTO test VALUES (137, 274);\nINSERT INTO test VALUES (138, 276);\nINSERT INTO test VALUES (139, 278);\nINSERT INTO test VALUES (140, 280);\nINSERT INTO test VALUES (141, 282);\nINSERT INTO test VALUES (142, 284);\nINSERT INTO test VALUES (143, 286);\nINSERT INTO test VALUES (144, 288);\nINSERT INTO test VALUES (145, 290);\nINSERT INTO test VALUES (146, 292);\nINSERT INTO test VALUES (147, 294);\nINSERT INTO test VALUES (148, 296);\nINSERT INTO test VALUES (149, 298);\nINSERT INTO test VALUES (150, 300);\nINSERT INTO test VALUES (151, 302);\nINSERT INTO test VALUES (152, 304);\nINSERT INTO test VALUES (153, 306);\nINSERT INTO test VALUES (154, 308);\nINSERT INTO test VALUES (155, 310);\nINSERT INTO test VALUES (156, 312);\nINSERT INTO test VALUES (157, 314);\nINSERT INTO test VALUES (158, 316);\nINSERT INTO test VALUES (159, 318);\nINSERT INTO test VALUES (160, 320);\nINSERT INTO test VALUES (161, 322);\nINSERT INTO test VALUES (162, 324);\nINSERT INTO test VALUES (163, 326);\nINSERT INTO test VALUES (164, 328);\nINSERT INTO test VALUES (165, 330);\nINSERT INTO test VALUES (166, 332);\nINSERT INTO test VALUES (167, 334);\nINSERT INTO test VALUES (168, 336);\nINSERT INTO test VALUES (169, 338);\nINSERT INTO test VALUES (170, 340);\nINSERT INTO test VALUES (171, 342);\nINSERT INTO test VALUES (172, 344);\nINSERT INTO test VALUES (173, 346);\nINSERT INTO test VALUES (174, 348);\nINSERT INTO test VALUES (175, 350);\nINSERT INTO test VALUES (176, 352);\nINSERT INTO test VALUES (177, 354);\nINSERT INTO test VALUES (178, 356);\nINSERT INTO test VALUES (179, 358);\nINSERT INTO test VALUES (180, 360);\nINSERT INTO test VALUES (181, 362);\nINSERT INTO test VALUES (182, 364);\nINSERT INTO test VALUES (183, 366);\nINSERT INTO test VALUES (184, 368);\nINSERT INTO test VALUES (185, 370);\nINSERT INTO test VALUES (186, 372);\nINSERT INTO test VALUES (187, 374);\nINSERT INTO test VALUES (188, 376);\nINSERT INTO test VALUES (189, 378);\nINSERT INTO test VALUES (190, 380);\nINSERT INTO test VALUES (191, 
382);\nINSERT INTO test VALUES (192, 384);\nINSERT INTO test VALUES (193, 386);\nINSERT INTO test VALUES (194, 388);\nINSERT INTO test VALUES (195, 390);\nINSERT INTO test VALUES (196, 392);\nINSERT INTO test VALUES (197, 394);\nINSERT INTO test VALUES (198, 396);\nINSERT INTO test VALUES (199, 398);\nINSERT INTO test VALUES (200, 400);\nINSERT INTO test VALUES (201, 402);\nINSERT INTO test VALUES (202, 404);\nINSERT INTO test VALUES (203, 406);\nINSERT INTO test VALUES (204, 408);\nINSERT INTO test VALUES (205, 410);\nINSERT INTO test VALUES (206, 412);\nINSERT INTO test VALUES (207, 414);\nINSERT INTO test VALUES (208, 416);\nINSERT INTO test VALUES (209, 418);\nINSERT INTO test VALUES (210, 420);\nINSERT INTO test VALUES (211, 422);\nINSERT INTO test VALUES (212, 424);\nINSERT INTO test VALUES (213, 426);\nINSERT INTO test VALUES (214, 428);\nINSERT INTO test VALUES (215, 430);\nINSERT INTO test VALUES (216, 432);\nINSERT INTO test VALUES (217, 434);\nINSERT INTO test VALUES (218, 436);\nINSERT INTO test VALUES (219, 438);\nINSERT INTO test VALUES (220, 440);\nINSERT INTO test VALUES (221, 442);\nINSERT INTO test VALUES (222, 444);\nINSERT INTO test VALUES (223, 446);\nINSERT INTO test VALUES (224, 448);\nINSERT INTO test VALUES (225, 450);\nINSERT INTO test VALUES (226, 452);\nINSERT INTO test VALUES (227, 454);\nINSERT INTO test VALUES (228, 456);\nINSERT INTO test VALUES (229, 458);\nINSERT INTO test VALUES (230, 460);\nINSERT INTO test VALUES (231, 462);\nINSERT INTO test VALUES (232, 464);\nINSERT INTO test VALUES (233, 466);\nINSERT INTO test VALUES (234, 468);\nINSERT INTO test VALUES (235, 470);\nINSERT INTO test VALUES (236, 472);\nINSERT INTO test VALUES (237, 474);\nINSERT INTO test VALUES (238, 476);\nINSERT INTO test VALUES (239, 478);\nINSERT INTO test VALUES (240, 480);\nINSERT INTO test VALUES (241, 482);\nINSERT INTO test VALUES (242, 484);\nINSERT INTO test VALUES (243, 486);\nINSERT INTO test VALUES (244, 488);\nINSERT INTO test VALUES (245, 490);\nINSERT INTO test VALUES (246, 492);\nINSERT INTO test VALUES (247, 494);\nINSERT INTO test VALUES (248, 496);\nINSERT INTO test VALUES (249, 498);\nINSERT INTO test VALUES (250, 500);\nINSERT INTO test VALUES (251, 502);\nINSERT INTO test VALUES (252, 504);\nINSERT INTO test VALUES (253, 506);\nINSERT INTO test VALUES (254, 508);\nINSERT INTO test VALUES (255, 510);\nINSERT INTO test VALUES (256, 512);\nINSERT INTO test VALUES (257, 514);\nINSERT INTO test VALUES (258, 516);\nINSERT INTO test VALUES (259, 518);\nINSERT INTO test VALUES (260, 520);\nINSERT INTO test VALUES (261, 522);\nINSERT INTO test VALUES (262, 524);\nINSERT INTO test VALUES (263, 526);\nINSERT INTO test VALUES (264, 528);\nINSERT INTO test VALUES (265, 530);\nINSERT INTO test VALUES (266, 532);\nINSERT INTO test VALUES (267, 534);\nINSERT INTO test VALUES (268, 536);\nINSERT INTO test VALUES (269, 538);\nINSERT INTO test VALUES (270, 540);\nINSERT INTO test VALUES (271, 542);\nINSERT INTO test VALUES (272, 544);\nINSERT INTO test VALUES (273, 546);\nINSERT INTO test VALUES (274, 548);\nINSERT INTO test VALUES (275, 550);\nINSERT INTO test VALUES (276, 552);\nINSERT INTO test VALUES (277, 554);\nINSERT INTO test VALUES (278, 556);\nINSERT INTO test VALUES (279, 558);\nINSERT INTO test VALUES (280, 560);\nINSERT INTO test VALUES (281, 562);\nINSERT INTO test VALUES (282, 564);\nINSERT INTO test VALUES (283, 566);\nINSERT INTO test VALUES (284, 568);\nINSERT INTO test VALUES (285, 570);\nINSERT INTO test VALUES (286, 572);\nINSERT INTO test VALUES (287, 
574);\nINSERT INTO test VALUES (288, 576);\nINSERT INTO test VALUES (289, 578);\nINSERT INTO test VALUES (290, 580);\nINSERT INTO test VALUES (291, 582);\nINSERT INTO test VALUES (292, 584);\nINSERT INTO test VALUES (293, 586);\nINSERT INTO test VALUES (294, 588);\nINSERT INTO test VALUES (295, 590);\nINSERT INTO test VALUES (296, 592);\nINSERT INTO test VALUES (297, 594);\nINSERT INTO test VALUES (298, 596);\nINSERT INTO test VALUES (299, 598);\nINSERT INTO test VALUES (300, 600);\nINSERT INTO test VALUES (301, 602);\nINSERT INTO test VALUES (302, 604);\nINSERT INTO test VALUES (303, 606);\nINSERT INTO test VALUES (304, 608);\nINSERT INTO test VALUES (305, 610);\nINSERT INTO test VALUES (306, 612);\nINSERT INTO test VALUES (307, 614);\nINSERT INTO test VALUES (308, 616);\nINSERT INTO test VALUES (309, 618);\nINSERT INTO test VALUES (310, 620);\nINSERT INTO test VALUES (311, 622);\nINSERT INTO test VALUES (312, 624);\nINSERT INTO test VALUES (313, 626);\nINSERT INTO test VALUES (314, 628);\nINSERT INTO test VALUES (315, 630);\nINSERT INTO test VALUES (316, 632);\nINSERT INTO test VALUES (317, 634);\nINSERT INTO test VALUES (318, 636);\nINSERT INTO test VALUES (319, 638);\nINSERT INTO test VALUES (320, 640);\nINSERT INTO test VALUES (321, 642);\nINSERT INTO test VALUES (322, 644);\nINSERT INTO test VALUES (323, 646);\nINSERT INTO test VALUES (324, 648);\nINSERT INTO test VALUES (325, 650);\nINSERT INTO test VALUES (326, 652);\nINSERT INTO test VALUES (327, 654);\nINSERT INTO test VALUES (328, 656);\nINSERT INTO test VALUES (329, 658);\nINSERT INTO test VALUES (330, 660);\nINSERT INTO test VALUES (331, 662);\nINSERT INTO test VALUES (332, 664);\nINSERT INTO test VALUES (333, 666);\nINSERT INTO test VALUES (334, 668);\nINSERT INTO test VALUES (335, 670);\nINSERT INTO test VALUES (336, 672);\nINSERT INTO test VALUES (337, 674);\nINSERT INTO test VALUES (338, 676);\nINSERT INTO test VALUES (339, 678);\nINSERT INTO test VALUES (340, 680);\nINSERT INTO test VALUES (341, 682);\nINSERT INTO test VALUES (342, 684);\nINSERT INTO test VALUES (343, 686);\nINSERT INTO test VALUES (344, 688);\nINSERT INTO test VALUES (345, 690);\nINSERT INTO test VALUES (346, 692);\nINSERT INTO test VALUES (347, 694);\nINSERT INTO test VALUES (348, 696);\nINSERT INTO test VALUES (349, 698);\nINSERT INTO test VALUES (350, 700);\nINSERT INTO test VALUES (351, 702);\nINSERT INTO test VALUES (352, 704);\nINSERT INTO test VALUES (353, 706);\nINSERT INTO test VALUES (354, 708);\nINSERT INTO test VALUES (355, 710);\nINSERT INTO test VALUES (356, 712);\nINSERT INTO test VALUES (357, 714);\nINSERT INTO test VALUES (358, 716);\nINSERT INTO test VALUES (359, 718);\nINSERT INTO test VALUES (360, 720);\nINSERT INTO test VALUES (361, 722);\nINSERT INTO test VALUES (362, 724);\nINSERT INTO test VALUES (363, 726);\nINSERT INTO test VALUES (364, 728);\nINSERT INTO test VALUES (365, 730);\nINSERT INTO test VALUES (366, 732);\nINSERT INTO test VALUES (367, 734);\nINSERT INTO test VALUES (368, 736);\nINSERT INTO test VALUES (369, 738);\nINSERT INTO test VALUES (370, 740);\nINSERT INTO test VALUES (371, 742);\nINSERT INTO test VALUES (372, 744);\nINSERT INTO test VALUES (373, 746);\nINSERT INTO test VALUES (374, 748);\nINSERT INTO test VALUES (375, 750);\nINSERT INTO test VALUES (376, 752);\nINSERT INTO test VALUES (377, 754);\nINSERT INTO test VALUES (378, 756);\nINSERT INTO test VALUES (379, 758);\nINSERT INTO test VALUES (380, 760);\nINSERT INTO test VALUES (381, 762);\nINSERT INTO test VALUES (382, 764);\nINSERT INTO test VALUES (383, 
766);\nINSERT INTO test VALUES (384, 768);\nINSERT INTO test VALUES (385, 770);\nINSERT INTO test VALUES (386, 772);\nINSERT INTO test VALUES (387, 774);\nINSERT INTO test VALUES (388, 776);\nINSERT INTO test VALUES (389, 778);\nINSERT INTO test VALUES (390, 780);\nINSERT INTO test VALUES (391, 782);\nINSERT INTO test VALUES (392, 784);\nINSERT INTO test VALUES (393, 786);\nINSERT INTO test VALUES (394, 788);\nINSERT INTO test VALUES (395, 790);\nINSERT INTO test VALUES (396, 792);\nINSERT INTO test VALUES (397, 794);\nINSERT INTO test VALUES (398, 796);\nINSERT INTO test VALUES (399, 798);\nINSERT INTO test VALUES (400, 800);\nINSERT INTO test VALUES (401, 802);\nINSERT INTO test VALUES (402, 804);\nINSERT INTO test VALUES (403, 806);\nINSERT INTO test VALUES (404, 808);\nINSERT INTO test VALUES (405, 810);\nINSERT INTO test VALUES (406, 812);\nINSERT INTO test VALUES (407, 814);\nINSERT INTO test VALUES (408, 816);\nINSERT INTO test VALUES (409, 818);\nINSERT INTO test VALUES (410, 820);\nINSERT INTO test VALUES (411, 822);\nINSERT INTO test VALUES (412, 824);\nINSERT INTO test VALUES (413, 826);\nINSERT INTO test VALUES (414, 828);\nINSERT INTO test VALUES (415, 830);\nINSERT INTO test VALUES (416, 832);\nINSERT INTO test VALUES (417, 834);\nINSERT INTO test VALUES (418, 836);\nINSERT INTO test VALUES (419, 838);\nINSERT INTO test VALUES (420, 840);\nINSERT INTO test VALUES (421, 842);\nINSERT INTO test VALUES (422, 844);\nINSERT INTO test VALUES (423, 846);\nINSERT INTO test VALUES (424, 848);\nINSERT INTO test VALUES (425, 850);\nINSERT INTO test VALUES (426, 852);\nINSERT INTO test VALUES (427, 854);\nINSERT INTO test VALUES (428, 856);\nINSERT INTO test VALUES (429, 858);\nINSERT INTO test VALUES (430, 860);\nINSERT INTO test VALUES (431, 862);\nINSERT INTO test VALUES (432, 864);\nINSERT INTO test VALUES (433, 866);\nINSERT INTO test VALUES (434, 868);\nINSERT INTO test VALUES (435, 870);\nINSERT INTO test VALUES (436, 872);\nINSERT INTO test VALUES (437, 874);\nINSERT INTO test VALUES (438, 876);\nINSERT INTO test VALUES (439, 878);\nINSERT INTO test VALUES (440, 880);\nINSERT INTO test VALUES (441, 882);\nINSERT INTO test VALUES (442, 884);\nINSERT INTO test VALUES (443, 886);\nINSERT INTO test VALUES (444, 888);\nINSERT INTO test VALUES (445, 890);\nINSERT INTO test VALUES (446, 892);\nINSERT INTO test VALUES (447, 894);\nINSERT INTO test VALUES (448, 896);\nINSERT INTO test VALUES (449, 898);\nINSERT INTO test VALUES (450, 900);\nINSERT INTO test VALUES (451, 902);\nINSERT INTO test VALUES (452, 904);\nINSERT INTO test VALUES (453, 906);\nINSERT INTO test VALUES (454, 908);\nINSERT INTO test VALUES (455, 910);\nINSERT INTO test VALUES (456, 912);\nINSERT INTO test VALUES (457, 914);\nINSERT INTO test VALUES (458, 916);\nINSERT INTO test VALUES (459, 918);\nINSERT INTO test VALUES (460, 920);\nINSERT INTO test VALUES (461, 922);\nINSERT INTO test VALUES (462, 924);\nINSERT INTO test VALUES (463, 926);\nINSERT INTO test VALUES (464, 928);\nINSERT INTO test VALUES (465, 930);\nINSERT INTO test VALUES (466, 932);\nINSERT INTO test VALUES (467, 934);\nINSERT INTO test VALUES (468, 936);\nINSERT INTO test VALUES (469, 938);\nINSERT INTO test VALUES (470, 940);\nINSERT INTO test VALUES (471, 942);\nINSERT INTO test VALUES (472, 944);\nINSERT INTO test VALUES (473, 946);\nINSERT INTO test VALUES (474, 948);\nINSERT INTO test VALUES (475, 950);\nINSERT INTO test VALUES (476, 952);\nINSERT INTO test VALUES (477, 954);\nINSERT INTO test VALUES (478, 956);\nINSERT INTO test VALUES (479, 
958);\nINSERT INTO test VALUES (480, 960);\nINSERT INTO test VALUES (481, 962);\nINSERT INTO test VALUES (482, 964);\nINSERT INTO test VALUES (483, 966);\nINSERT INTO test VALUES (484, 968);\nINSERT INTO test VALUES (485, 970);\nINSERT INTO test VALUES (486, 972);\nINSERT INTO test VALUES (487, 974);\nINSERT INTO test VALUES (488, 976);\nINSERT INTO test VALUES (489, 978);\nINSERT INTO test VALUES (490, 980);\nINSERT INTO test VALUES (491, 982);\nINSERT INTO test VALUES (492, 984);\nINSERT INTO test VALUES (493, 986);\nINSERT INTO test VALUES (494, 988);\nINSERT INTO test VALUES (495, 990);\nINSERT INTO test VALUES (496, 992);\nINSERT INTO test VALUES (497, 994);\nINSERT INTO test VALUES (498, 996);\nINSERT INTO test VALUES (499, 998);\nINSERT INTO test VALUES (500, 1000);\nINSERT INTO test VALUES (501, 1002);\nINSERT INTO test VALUES (502, 1004);\nINSERT INTO test VALUES (503, 1006);\nINSERT INTO test VALUES (504, 1008);\nINSERT INTO test VALUES (505, 1010);\nINSERT INTO test VALUES (506, 1012);\nINSERT INTO test VALUES (507, 1014);\nINSERT INTO test VALUES (508, 1016);\nINSERT INTO test VALUES (509, 1018);\nINSERT INTO test VALUES (510, 1020);\nINSERT INTO test VALUES (511, 1022);\nINSERT INTO test VALUES (512, 1024);\nINSERT INTO test VALUES (513, 1026);\nINSERT INTO test VALUES (514, 1028);\nINSERT INTO test VALUES (515, 1030);\nINSERT INTO test VALUES (516, 1032);\nINSERT INTO test VALUES (517, 1034);\nINSERT INTO test VALUES (518, 1036);\nINSERT INTO test VALUES (519, 1038);\nINSERT INTO test VALUES (520, 1040);\nINSERT INTO test VALUES (521, 1042);\nINSERT INTO test VALUES (522, 1044);\nINSERT INTO test VALUES (523, 1046);\nINSERT INTO test VALUES (524, 1048);\nINSERT INTO test VALUES (525, 1050);\nINSERT INTO test VALUES (526, 1052);\nINSERT INTO test VALUES (527, 1054);\nINSERT INTO test VALUES (528, 1056);\nINSERT INTO test VALUES (529, 1058);\nINSERT INTO test VALUES (530, 1060);\nINSERT INTO test VALUES (531, 1062);\nINSERT INTO test VALUES (532, 1064);\nINSERT INTO test VALUES (533, 1066);\nINSERT INTO test VALUES (534, 1068);\nINSERT INTO test VALUES (535, 1070);\nINSERT INTO test VALUES (536, 1072);\nINSERT INTO test VALUES (537, 1074);\nINSERT INTO test VALUES (538, 1076);\nINSERT INTO test VALUES (539, 1078);\nINSERT INTO test VALUES (540, 1080);\nINSERT INTO test VALUES (541, 1082);\nINSERT INTO test VALUES (542, 1084);\nINSERT INTO test VALUES (543, 1086);\nINSERT INTO test VALUES (544, 1088);\nINSERT INTO test VALUES (545, 1090);\nINSERT INTO test VALUES (546, 1092);\nINSERT INTO test VALUES (547, 1094);\nINSERT INTO test VALUES (548, 1096);\nINSERT INTO test VALUES (549, 1098);\nINSERT INTO test VALUES (550, 1100);\nINSERT INTO test VALUES (551, 1102);\nINSERT INTO test VALUES (552, 1104);\nINSERT INTO test VALUES (553, 1106);\nINSERT INTO test VALUES (554, 1108);\nINSERT INTO test VALUES (555, 1110);\nINSERT INTO test VALUES (556, 1112);\nINSERT INTO test VALUES (557, 1114);\nINSERT INTO test VALUES (558, 1116);\nINSERT INTO test VALUES (559, 1118);\nINSERT INTO test VALUES (560, 1120);\nINSERT INTO test VALUES (561, 1122);\nINSERT INTO test VALUES (562, 1124);\nINSERT INTO test VALUES (563, 1126);\nINSERT INTO test VALUES (564, 1128);\nINSERT INTO test VALUES (565, 1130);\nINSERT INTO test VALUES (566, 1132);\nINSERT INTO test VALUES (567, 1134);\nINSERT INTO test VALUES (568, 1136);\nINSERT INTO test VALUES (569, 1138);\nINSERT INTO test VALUES (570, 1140);\nINSERT INTO test VALUES (571, 1142);\nINSERT INTO test VALUES (572, 1144);\nINSERT INTO test VALUES (573, 
1146);\nINSERT INTO test VALUES (574, 1148);\nINSERT INTO test VALUES (575, 1150);\nINSERT INTO test VALUES (576, 1152);\nINSERT INTO test VALUES (577, 1154);\nINSERT INTO test VALUES (578, 1156);\nINSERT INTO test VALUES (579, 1158);\nINSERT INTO test VALUES (580, 1160);\nINSERT INTO test VALUES (581, 1162);\nINSERT INTO test VALUES (582, 1164);\nINSERT INTO test VALUES (583, 1166);\nINSERT INTO test VALUES (584, 1168);\nINSERT INTO test VALUES (585, 1170);\nINSERT INTO test VALUES (586, 1172);\nINSERT INTO test VALUES (587, 1174);\nINSERT INTO test VALUES (588, 1176);\nINSERT INTO test VALUES (589, 1178);\nINSERT INTO test VALUES (590, 1180);\nINSERT INTO test VALUES (591, 1182);\nINSERT INTO test VALUES (592, 1184);\nINSERT INTO test VALUES (593, 1186);\nINSERT INTO test VALUES (594, 1188);\nINSERT INTO test VALUES (595, 1190);\nINSERT INTO test VALUES (596, 1192);\nINSERT INTO test VALUES (597, 1194);\nINSERT INTO test VALUES (598, 1196);\nINSERT INTO test VALUES (599, 1198);\nINSERT INTO test VALUES (600, 1200);\nINSERT INTO test VALUES (601, 1202);\nINSERT INTO test VALUES (602, 1204);\nINSERT INTO test VALUES (603, 1206);\nINSERT INTO test VALUES (604, 1208);\nINSERT INTO test VALUES (605, 1210);\nINSERT INTO test VALUES (606, 1212);\nINSERT INTO test VALUES (607, 1214);\nINSERT INTO test VALUES (608, 1216);\nINSERT INTO test VALUES (609, 1218);\nINSERT INTO test VALUES (610, 1220);\nINSERT INTO test VALUES (611, 1222);\nINSERT INTO test VALUES (612, 1224);\nINSERT INTO test VALUES (613, 1226);\nINSERT INTO test VALUES (614, 1228);\nINSERT INTO test VALUES (615, 1230);\nINSERT INTO test VALUES (616, 1232);\nINSERT INTO test VALUES (617, 1234);\nINSERT INTO test VALUES (618, 1236);\nINSERT INTO test VALUES (619, 1238);\nINSERT INTO test VALUES (620, 1240);\nINSERT INTO test VALUES (621, 1242);\nINSERT INTO test VALUES (622, 1244);\nINSERT INTO test VALUES (623, 1246);\nINSERT INTO test VALUES (624, 1248);\nINSERT INTO test VALUES (625, 1250);\nINSERT INTO test VALUES (626, 1252);\nINSERT INTO test VALUES (627, 1254);\nINSERT INTO test VALUES (628, 1256);\nINSERT INTO test VALUES (629, 1258);\nINSERT INTO test VALUES (630, 1260);\nINSERT INTO test VALUES (631, 1262);\nINSERT INTO test VALUES (632, 1264);\nINSERT INTO test VALUES (633, 1266);\nINSERT INTO test VALUES (634, 1268);\nINSERT INTO test VALUES (635, 1270);\nINSERT INTO test VALUES (636, 1272);\nINSERT INTO test VALUES (637, 1274);\nINSERT INTO test VALUES (638, 1276);\nINSERT INTO test VALUES (639, 1278);\nINSERT INTO test VALUES (640, 1280);\nINSERT INTO test VALUES (641, 1282);\nINSERT INTO test VALUES (642, 1284);\nINSERT INTO test VALUES (643, 1286);\nINSERT INTO test VALUES (644, 1288);\nINSERT INTO test VALUES (645, 1290);\nINSERT INTO test VALUES (646, 1292);\nINSERT INTO test VALUES (647, 1294);\nINSERT INTO test VALUES (648, 1296);\nINSERT INTO test VALUES (649, 1298);\nINSERT INTO test VALUES (650, 1300);\nINSERT INTO test VALUES (651, 1302);\nINSERT INTO test VALUES (652, 1304);\nINSERT INTO test VALUES (653, 1306);\nINSERT INTO test VALUES (654, 1308);\nINSERT INTO test VALUES (655, 1310);\nINSERT INTO test VALUES (656, 1312);\nINSERT INTO test VALUES (657, 1314);\nINSERT INTO test VALUES (658, 1316);\nINSERT INTO test VALUES (659, 1318);\nINSERT INTO test VALUES (660, 1320);\nINSERT INTO test VALUES (661, 1322);\nINSERT INTO test VALUES (662, 1324);\nINSERT INTO test VALUES (663, 1326);\nINSERT INTO test VALUES (664, 1328);\nINSERT INTO test VALUES (665, 1330);\nINSERT INTO test VALUES (666, 1332);\nINSERT INTO 
test VALUES (667, 1334);\nINSERT INTO test VALUES (668, 1336);\nINSERT INTO test VALUES (669, 1338);\nINSERT INTO test VALUES (670, 1340);\nINSERT INTO test VALUES (671, 1342);\nINSERT INTO test VALUES (672, 1344);\nINSERT INTO test VALUES (673, 1346);\nINSERT INTO test VALUES (674, 1348);\nINSERT INTO test VALUES (675, 1350);\nINSERT INTO test VALUES (676, 1352);\nINSERT INTO test VALUES (677, 1354);\nINSERT INTO test VALUES (678, 1356);\nINSERT INTO test VALUES (679, 1358);\nINSERT INTO test VALUES (680, 1360);\nINSERT INTO test VALUES (681, 1362);\nINSERT INTO test VALUES (682, 1364);\nINSERT INTO test VALUES (683, 1366);\nINSERT INTO test VALUES (684, 1368);\nINSERT INTO test VALUES (685, 1370);\nINSERT INTO test VALUES (686, 1372);\nINSERT INTO test VALUES (687, 1374);\nINSERT INTO test VALUES (688, 1376);\nINSERT INTO test VALUES (689, 1378);\nINSERT INTO test VALUES (690, 1380);\nINSERT INTO test VALUES (691, 1382);\nINSERT INTO test VALUES (692, 1384);\nINSERT INTO test VALUES (693, 1386);\nINSERT INTO test VALUES (694, 1388);\nINSERT INTO test VALUES (695, 1390);\nINSERT INTO test VALUES (696, 1392);\nINSERT INTO test VALUES (697, 1394);\nINSERT INTO test VALUES (698, 1396);\nINSERT INTO test VALUES (699, 1398);\nINSERT INTO test VALUES (700, 1400);\nINSERT INTO test VALUES (701, 1402);\nINSERT INTO test VALUES (702, 1404);\nINSERT INTO test VALUES (703, 1406);\nINSERT INTO test VALUES (704, 1408);\nINSERT INTO test VALUES (705, 1410);\nINSERT INTO test VALUES (706, 1412);\nINSERT INTO test VALUES (707, 1414);\nINSERT INTO test VALUES (708, 1416);\nINSERT INTO test VALUES (709, 1418);\nINSERT INTO test VALUES (710, 1420);\nINSERT INTO test VALUES (711, 1422);\nINSERT INTO test VALUES (712, 1424);\nINSERT INTO test VALUES (713, 1426);\nINSERT INTO test VALUES (714, 1428);\nINSERT INTO test VALUES (715, 1430);\nINSERT INTO test VALUES (716, 1432);\nINSERT INTO test VALUES (717, 1434);\nINSERT INTO test VALUES (718, 1436);\nINSERT INTO test VALUES (719, 1438);\nINSERT INTO test VALUES (720, 1440);\nINSERT INTO test VALUES (721, 1442);\nINSERT INTO test VALUES (722, 1444);\nINSERT INTO test VALUES (723, 1446);\nINSERT INTO test VALUES (724, 1448);\nINSERT INTO test VALUES (725, 1450);\nINSERT INTO test VALUES (726, 1452);\nINSERT INTO test VALUES (727, 1454);\nINSERT INTO test VALUES (728, 1456);\nINSERT INTO test VALUES (729, 1458);\nINSERT INTO test VALUES (730, 1460);\nINSERT INTO test VALUES (731, 1462);\nINSERT INTO test VALUES (732, 1464);\nINSERT INTO test VALUES (733, 1466);\nINSERT INTO test VALUES (734, 1468);\nINSERT INTO test VALUES (735, 1470);\nINSERT INTO test VALUES (736, 1472);\nINSERT INTO test VALUES (737, 1474);\nINSERT INTO test VALUES (738, 1476);\nINSERT INTO test VALUES (739, 1478);\nINSERT INTO test VALUES (740, 1480);\nINSERT INTO test VALUES (741, 1482);\nINSERT INTO test VALUES (742, 1484);\nINSERT INTO test VALUES (743, 1486);\nINSERT INTO test VALUES (744, 1488);\nINSERT INTO test VALUES (745, 1490);\nINSERT INTO test VALUES (746, 1492);\nINSERT INTO test VALUES (747, 1494);\nINSERT INTO test VALUES (748, 1496);\nINSERT INTO test VALUES (749, 1498);\nINSERT INTO test VALUES (750, 1500);\nINSERT INTO test VALUES (751, 1502);\nINSERT INTO test VALUES (752, 1504);\nINSERT INTO test VALUES (753, 1506);\nINSERT INTO test VALUES (754, 1508);\nINSERT INTO test VALUES (755, 1510);\nINSERT INTO test VALUES (756, 1512);\nINSERT INTO test VALUES (757, 1514);\nINSERT INTO test VALUES (758, 1516);\nINSERT INTO test VALUES (759, 1518);\nINSERT INTO test VALUES (760, 
1520);\nINSERT INTO test VALUES (761, 1522);\nINSERT INTO test VALUES (762, 1524);\nINSERT INTO test VALUES (763, 1526);\nINSERT INTO test VALUES (764, 1528);\nINSERT INTO test VALUES (765, 1530);\nINSERT INTO test VALUES (766, 1532);\nINSERT INTO test VALUES (767, 1534);\nINSERT INTO test VALUES (768, 1536);\nINSERT INTO test VALUES (769, 1538);\nINSERT INTO test VALUES (770, 1540);\nINSERT INTO test VALUES (771, 1542);\nINSERT INTO test VALUES (772, 1544);\nINSERT INTO test VALUES (773, 1546);\nINSERT INTO test VALUES (774, 1548);\nINSERT INTO test VALUES (775, 1550);\nINSERT INTO test VALUES (776, 1552);\nINSERT INTO test VALUES (777, 1554);\nINSERT INTO test VALUES (778, 1556);\nINSERT INTO test VALUES (779, 1558);\nINSERT INTO test VALUES (780, 1560);\nINSERT INTO test VALUES (781, 1562);\nINSERT INTO test VALUES (782, 1564);\nINSERT INTO test VALUES (783, 1566);\nINSERT INTO test VALUES (784, 1568);\nINSERT INTO test VALUES (785, 1570);\nINSERT INTO test VALUES (786, 1572);\nINSERT INTO test VALUES (787, 1574);\nINSERT INTO test VALUES (788, 1576);\nINSERT INTO test VALUES (789, 1578);\nINSERT INTO test VALUES (790, 1580);\nINSERT INTO test VALUES (791, 1582);\nINSERT INTO test VALUES (792, 1584);\nINSERT INTO test VALUES (793, 1586);\nINSERT INTO test VALUES (794, 1588);\nINSERT INTO test VALUES (795, 1590);\nINSERT INTO test VALUES (796, 1592);\nINSERT INTO test VALUES (797, 1594);\nINSERT INTO test VALUES (798, 1596);\nINSERT INTO test VALUES (799, 1598);\nINSERT INTO test VALUES (800, 1600);\nINSERT INTO test VALUES (801, 1602);\nINSERT INTO test VALUES (802, 1604);\nINSERT INTO test VALUES (803, 1606);\nINSERT INTO test VALUES (804, 1608);\nINSERT INTO test VALUES (805, 1610);\nINSERT INTO test VALUES (806, 1612);\nINSERT INTO test VALUES (807, 1614);\nINSERT INTO test VALUES (808, 1616);\nINSERT INTO test VALUES (809, 1618);\nINSERT INTO test VALUES (810, 1620);\nINSERT INTO test VALUES (811, 1622);\nINSERT INTO test VALUES (812, 1624);\nINSERT INTO test VALUES (813, 1626);\nINSERT INTO test VALUES (814, 1628);\nINSERT INTO test VALUES (815, 1630);\nINSERT INTO test VALUES (816, 1632);\nINSERT INTO test VALUES (817, 1634);\nINSERT INTO test VALUES (818, 1636);\nINSERT INTO test VALUES (819, 1638);\nINSERT INTO test VALUES (820, 1640);\nINSERT INTO test VALUES (821, 1642);\nINSERT INTO test VALUES (822, 1644);\nINSERT INTO test VALUES (823, 1646);\nINSERT INTO test VALUES (824, 1648);\nINSERT INTO test VALUES (825, 1650);\nINSERT INTO test VALUES (826, 1652);\nINSERT INTO test VALUES (827, 1654);\nINSERT INTO test VALUES (828, 1656);\nINSERT INTO test VALUES (829, 1658);\nINSERT INTO test VALUES (830, 1660);\nINSERT INTO test VALUES (831, 1662);\nINSERT INTO test VALUES (832, 1664);\nINSERT INTO test VALUES (833, 1666);\nINSERT INTO test VALUES (834, 1668);\nINSERT INTO test VALUES (835, 1670);\nINSERT INTO test VALUES (836, 1672);\nINSERT INTO test VALUES (837, 1674);\nINSERT INTO test VALUES (838, 1676);\nINSERT INTO test VALUES (839, 1678);\nINSERT INTO test VALUES (840, 1680);\nINSERT INTO test VALUES (841, 1682);\nINSERT INTO test VALUES (842, 1684);\nINSERT INTO test VALUES (843, 1686);\nINSERT INTO test VALUES (844, 1688);\nINSERT INTO test VALUES (845, 1690);\nINSERT INTO test VALUES (846, 1692);\nINSERT INTO test VALUES (847, 1694);\nINSERT INTO test VALUES (848, 1696);\nINSERT INTO test VALUES (849, 1698);\nINSERT INTO test VALUES (850, 1700);\nINSERT INTO test VALUES (851, 1702);\nINSERT INTO test VALUES (852, 1704);\nINSERT INTO test VALUES (853, 1706);\nINSERT INTO 
test VALUES (854, 1708);\nINSERT INTO test VALUES (855, 1710);\nINSERT INTO test VALUES (856, 1712);\nINSERT INTO test VALUES (857, 1714);\nINSERT INTO test VALUES (858, 1716);\nINSERT INTO test VALUES (859, 1718);\nINSERT INTO test VALUES (860, 1720);\nINSERT INTO test VALUES (861, 1722);\nINSERT INTO test VALUES (862, 1724);\nINSERT INTO test VALUES (863, 1726);\nINSERT INTO test VALUES (864, 1728);\nINSERT INTO test VALUES (865, 1730);\nINSERT INTO test VALUES (866, 1732);\nINSERT INTO test VALUES (867, 1734);\nINSERT INTO test VALUES (868, 1736);\nINSERT INTO test VALUES (869, 1738);\nINSERT INTO test VALUES (870, 1740);\nINSERT INTO test VALUES (871, 1742);\nINSERT INTO test VALUES (872, 1744);\nINSERT INTO test VALUES (873, 1746);\nINSERT INTO test VALUES (874, 1748);\nINSERT INTO test VALUES (875, 1750);\nINSERT INTO test VALUES (876, 1752);\nINSERT INTO test VALUES (877, 1754);\nINSERT INTO test VALUES (878, 1756);\nINSERT INTO test VALUES (879, 1758);\nINSERT INTO test VALUES (880, 1760);\nINSERT INTO test VALUES (881, 1762);\nINSERT INTO test VALUES (882, 1764);\nINSERT INTO test VALUES (883, 1766);\nINSERT INTO test VALUES (884, 1768);\nINSERT INTO test VALUES (885, 1770);\nINSERT INTO test VALUES (886, 1772);\nINSERT INTO test VALUES (887, 1774);\nINSERT INTO test VALUES (888, 1776);\nINSERT INTO test VALUES (889, 1778);\nINSERT INTO test VALUES (890, 1780);\nINSERT INTO test VALUES (891, 1782);\nINSERT INTO test VALUES (892, 1784);\nINSERT INTO test VALUES (893, 1786);\nINSERT INTO test VALUES (894, 1788);\nINSERT INTO test VALUES (895, 1790);\nINSERT INTO test VALUES (896, 1792);\nINSERT INTO test VALUES (897, 1794);\nINSERT INTO test VALUES (898, 1796);\nINSERT INTO test VALUES (899, 1798);\nINSERT INTO test VALUES (900, 1800);\nINSERT INTO test VALUES (901, 1802);\nINSERT INTO test VALUES (902, 1804);\nINSERT INTO test VALUES (903, 1806);\nINSERT INTO test VALUES (904, 1808);\nINSERT INTO test VALUES (905, 1810);\nINSERT INTO test VALUES (906, 1812);\nINSERT INTO test VALUES (907, 1814);\nINSERT INTO test VALUES (908, 1816);\nINSERT INTO test VALUES (909, 1818);\nINSERT INTO test VALUES (910, 1820);\nINSERT INTO test VALUES (911, 1822);\nINSERT INTO test VALUES (912, 1824);\nINSERT INTO test VALUES (913, 1826);\nINSERT INTO test VALUES (914, 1828);\nINSERT INTO test VALUES (915, 1830);\nINSERT INTO test VALUES (916, 1832);\nINSERT INTO test VALUES (917, 1834);\nINSERT INTO test VALUES (918, 1836);\nINSERT INTO test VALUES (919, 1838);\nINSERT INTO test VALUES (920, 1840);\nINSERT INTO test VALUES (921, 1842);\nINSERT INTO test VALUES (922, 1844);\nINSERT INTO test VALUES (923, 1846);\nINSERT INTO test VALUES (924, 1848);\nINSERT INTO test VALUES (925, 1850);\nINSERT INTO test VALUES (926, 1852);\nINSERT INTO test VALUES (927, 1854);\nINSERT INTO test VALUES (928, 1856);\nINSERT INTO test VALUES (929, 1858);\nINSERT INTO test VALUES (930, 1860);\nINSERT INTO test VALUES (931, 1862);\nINSERT INTO test VALUES (932, 1864);\nINSERT INTO test VALUES (933, 1866);\nINSERT INTO test VALUES (934, 1868);\nINSERT INTO test VALUES (935, 1870);\nINSERT INTO test VALUES (936, 1872);\nINSERT INTO test VALUES (937, 1874);\nINSERT INTO test VALUES (938, 1876);\nINSERT INTO test VALUES (939, 1878);\nINSERT INTO test VALUES (940, 1880);\nINSERT INTO test VALUES (941, 1882);\nINSERT INTO test VALUES (942, 1884);\nINSERT INTO test VALUES (943, 1886);\nINSERT INTO test VALUES (944, 1888);\nINSERT INTO test VALUES (945, 1890);\nINSERT INTO test VALUES (946, 1892);\nINSERT INTO test VALUES (947, 
1894);\nINSERT INTO test VALUES (948, 1896);\nINSERT INTO test VALUES (949, 1898);\nINSERT INTO test VALUES (950, 1900);\nINSERT INTO test VALUES (951, 1902);\nINSERT INTO test VALUES (952, 1904);\nINSERT INTO test VALUES (953, 1906);\nINSERT INTO test VALUES (954, 1908);\nINSERT INTO test VALUES (955, 1910);\nINSERT INTO test VALUES (956, 1912);\nINSERT INTO test VALUES (957, 1914);\nINSERT INTO test VALUES (958, 1916);\nINSERT INTO test VALUES (959, 1918);\nINSERT INTO test VALUES (960, 1920);\nINSERT INTO test VALUES (961, 1922);\nINSERT INTO test VALUES (962, 1924);\nINSERT INTO test VALUES (963, 1926);\nINSERT INTO test VALUES (964, 1928);\nINSERT INTO test VALUES (965, 1930);\nINSERT INTO test VALUES (966, 1932);\nINSERT INTO test VALUES (967, 1934);\nINSERT INTO test VALUES (968, 1936);\nINSERT INTO test VALUES (969, 1938);\nINSERT INTO test VALUES (970, 1940);\nINSERT INTO test VALUES (971, 1942);\nINSERT INTO test VALUES (972, 1944);\nINSERT INTO test VALUES (973, 1946);\nINSERT INTO test VALUES (974, 1948);\nINSERT INTO test VALUES (975, 1950);\nINSERT INTO test VALUES (976, 1952);\nINSERT INTO test VALUES (977, 1954);\nINSERT INTO test VALUES (978, 1956);\nINSERT INTO test VALUES (979, 1958);\nINSERT INTO test VALUES (980, 1960);\nINSERT INTO test VALUES (981, 1962);\nINSERT INTO test VALUES (982, 1964);\nINSERT INTO test VALUES (983, 1966);\nINSERT INTO test VALUES (984, 1968);\nINSERT INTO test VALUES (985, 1970);\nINSERT INTO test VALUES (986, 1972);\nINSERT INTO test VALUES (987, 1974);\nINSERT INTO test VALUES (988, 1976);\nINSERT INTO test VALUES (989, 1978);\nINSERT INTO test VALUES (990, 1980);\nINSERT INTO test VALUES (991, 1982);\nINSERT INTO test VALUES (992, 1984);\nINSERT INTO test VALUES (993, 1986);\nINSERT INTO test VALUES (994, 1988);\nINSERT INTO test VALUES (995, 1990);\nINSERT INTO test VALUES (996, 1992);\nINSERT INTO test VALUES (997, 1994);\nINSERT INTO test VALUES (998, 1996);\nINSERT INTO test VALUES (999, 1998);\nSELECT * FROM test;\nUPDATE test SET num = 0 WHERE id = 0;\nUPDATE test SET num = 3 WHERE id = 1;\nUPDATE test SET num = 6 WHERE id = 2;\nUPDATE test SET num = 9 WHERE id = 3;\nUPDATE test SET num = 12 WHERE id = 4;\nUPDATE test SET num = 15 WHERE id = 5;\nUPDATE test SET num = 18 WHERE id = 6;\nUPDATE test SET num = 21 WHERE id = 7;\nUPDATE test SET num = 24 WHERE id = 8;\nUPDATE test SET num = 27 WHERE id = 9;\nUPDATE test SET num = 30 WHERE id = 10;\nUPDATE test SET num = 33 WHERE id = 11;\nUPDATE test SET num = 36 WHERE id = 12;\nUPDATE test SET num = 39 WHERE id = 13;\nUPDATE test SET num = 42 WHERE id = 14;\nUPDATE test SET num = 45 WHERE id = 15;\nUPDATE test SET num = 48 WHERE id = 16;\nUPDATE test SET num = 51 WHERE id = 17;\nUPDATE test SET num = 54 WHERE id = 18;\nUPDATE test SET num = 57 WHERE id = 19;\nUPDATE test SET num = 60 WHERE id = 20;\nUPDATE test SET num = 63 WHERE id = 21;\nUPDATE test SET num = 66 WHERE id = 22;\nUPDATE test SET num = 69 WHERE id = 23;\nUPDATE test SET num = 72 WHERE id = 24;\nUPDATE test SET num = 75 WHERE id = 25;\nUPDATE test SET num = 78 WHERE id = 26;\nUPDATE test SET num = 81 WHERE id = 27;\nUPDATE test SET num = 84 WHERE id = 28;\nUPDATE test SET num = 87 WHERE id = 29;\nUPDATE test SET num = 90 WHERE id = 30;\nUPDATE test SET num = 93 WHERE id = 31;\nUPDATE test SET num = 96 WHERE id = 32;\nUPDATE test SET num = 99 WHERE id = 33;\nUPDATE test SET num = 102 WHERE id = 34;\nUPDATE test SET num = 105 WHERE id = 35;\nUPDATE test SET num = 108 WHERE id = 36;\nUPDATE test SET num = 111 WHERE id = 
37;\nUPDATE test SET num = 114 WHERE id = 38;\nUPDATE test SET num = 117 WHERE id = 39;\nUPDATE test SET num = 120 WHERE id = 40;\nUPDATE test SET num = 123 WHERE id = 41;\nUPDATE test SET num = 126 WHERE id = 42;\nUPDATE test SET num = 129 WHERE id = 43;\nUPDATE test SET num = 132 WHERE id = 44;\nUPDATE test SET num = 135 WHERE id = 45;\nUPDATE test SET num = 138 WHERE id = 46;\nUPDATE test SET num = 141 WHERE id = 47;\nUPDATE test SET num = 144 WHERE id = 48;\nUPDATE test SET num = 147 WHERE id = 49;\nUPDATE test SET num = 150 WHERE id = 50;\nUPDATE test SET num = 153 WHERE id = 51;\nUPDATE test SET num = 156 WHERE id = 52;\nUPDATE test SET num = 159 WHERE id = 53;\nUPDATE test SET num = 162 WHERE id = 54;\nUPDATE test SET num = 165 WHERE id = 55;\nUPDATE test SET num = 168 WHERE id = 56;\nUPDATE test SET num = 171 WHERE id = 57;\nUPDATE test SET num = 174 WHERE id = 58;\nUPDATE test SET num = 177 WHERE id = 59;\nUPDATE test SET num = 180 WHERE id = 60;\nUPDATE test SET num = 183 WHERE id = 61;\nUPDATE test SET num = 186 WHERE id = 62;\nUPDATE test SET num = 189 WHERE id = 63;\nUPDATE test SET num = 192 WHERE id = 64;\nUPDATE test SET num = 195 WHERE id = 65;\nUPDATE test SET num = 198 WHERE id = 66;\nUPDATE test SET num = 201 WHERE id = 67;\nUPDATE test SET num = 204 WHERE id = 68;\nUPDATE test SET num = 207 WHERE id = 69;\nUPDATE test SET num = 210 WHERE id = 70;\nUPDATE test SET num = 213 WHERE id = 71;\nUPDATE test SET num = 216 WHERE id = 72;\nUPDATE test SET num = 219 WHERE id = 73;\nUPDATE test SET num = 222 WHERE id = 74;\nUPDATE test SET num = 225 WHERE id = 75;\nUPDATE test SET num = 228 WHERE id = 76;\nUPDATE test SET num = 231 WHERE id = 77;\nUPDATE test SET num = 234 WHERE id = 78;\nUPDATE test SET num = 237 WHERE id = 79;\nUPDATE test SET num = 240 WHERE id = 80;\nUPDATE test SET num = 243 WHERE id = 81;\nUPDATE test SET num = 246 WHERE id = 82;\nUPDATE test SET num = 249 WHERE id = 83;\nUPDATE test SET num = 252 WHERE id = 84;\nUPDATE test SET num = 255 WHERE id = 85;\nUPDATE test SET num = 258 WHERE id = 86;\nUPDATE test SET num = 261 WHERE id = 87;\nUPDATE test SET num = 264 WHERE id = 88;\nUPDATE test SET num = 267 WHERE id = 89;\nUPDATE test SET num = 270 WHERE id = 90;\nUPDATE test SET num = 273 WHERE id = 91;\nUPDATE test SET num = 276 WHERE id = 92;\nUPDATE test SET num = 279 WHERE id = 93;\nUPDATE test SET num = 282 WHERE id = 94;\nUPDATE test SET num = 285 WHERE id = 95;\nUPDATE test SET num = 288 WHERE id = 96;\nUPDATE test SET num = 291 WHERE id = 97;\nUPDATE test SET num = 294 WHERE id = 98;\nUPDATE test SET num = 297 WHERE id = 99;\nUPDATE test SET num = 300 WHERE id = 100;\nUPDATE test SET num = 303 WHERE id = 101;\nUPDATE test SET num = 306 WHERE id = 102;\nUPDATE test SET num = 309 WHERE id = 103;\nUPDATE test SET num = 312 WHERE id = 104;\nUPDATE test SET num = 315 WHERE id = 105;\nUPDATE test SET num = 318 WHERE id = 106;\nUPDATE test SET num = 321 WHERE id = 107;\nUPDATE test SET num = 324 WHERE id = 108;\nUPDATE test SET num = 327 WHERE id = 109;\nUPDATE test SET num = 330 WHERE id = 110;\nUPDATE test SET num = 333 WHERE id = 111;\nUPDATE test SET num = 336 WHERE id = 112;\nUPDATE test SET num = 339 WHERE id = 113;\nUPDATE test SET num = 342 WHERE id = 114;\nUPDATE test SET num = 345 WHERE id = 115;\nUPDATE test SET num = 348 WHERE id = 116;\nUPDATE test SET num = 351 WHERE id = 117;\nUPDATE test SET num = 354 WHERE id = 118;\nUPDATE test SET num = 357 WHERE id = 119;\nUPDATE test SET num = 360 WHERE id = 120;\nUPDATE test SET num = 363 WHERE id = 
121;\nUPDATE test SET num = 366 WHERE id = 122;\nUPDATE test SET num = 369 WHERE id = 123;\nUPDATE test SET num = 372 WHERE id = 124;\nUPDATE test SET num = 375 WHERE id = 125;\nUPDATE test SET num = 378 WHERE id = 126;\nUPDATE test SET num = 381 WHERE id = 127;\nUPDATE test SET num = 384 WHERE id = 128;\nUPDATE test SET num = 387 WHERE id = 129;\nUPDATE test SET num = 390 WHERE id = 130;\nUPDATE test SET num = 393 WHERE id = 131;\nUPDATE test SET num = 396 WHERE id = 132;\nUPDATE test SET num = 399 WHERE id = 133;\nUPDATE test SET num = 402 WHERE id = 134;\nUPDATE test SET num = 405 WHERE id = 135;\nUPDATE test SET num = 408 WHERE id = 136;\nUPDATE test SET num = 411 WHERE id = 137;\nUPDATE test SET num = 414 WHERE id = 138;\nUPDATE test SET num = 417 WHERE id = 139;\nUPDATE test SET num = 420 WHERE id = 140;\nUPDATE test SET num = 423 WHERE id = 141;\nUPDATE test SET num = 426 WHERE id = 142;\nUPDATE test SET num = 429 WHERE id = 143;\nUPDATE test SET num = 432 WHERE id = 144;\nUPDATE test SET num = 435 WHERE id = 145;\nUPDATE test SET num = 438 WHERE id = 146;\nUPDATE test SET num = 441 WHERE id = 147;\nUPDATE test SET num = 444 WHERE id = 148;\nUPDATE test SET num = 447 WHERE id = 149;\nUPDATE test SET num = 450 WHERE id = 150;\nUPDATE test SET num = 453 WHERE id = 151;\nUPDATE test SET num = 456 WHERE id = 152;\nUPDATE test SET num = 459 WHERE id = 153;\nUPDATE test SET num = 462 WHERE id = 154;\nUPDATE test SET num = 465 WHERE id = 155;\nUPDATE test SET num = 468 WHERE id = 156;\nUPDATE test SET num = 471 WHERE id = 157;\nUPDATE test SET num = 474 WHERE id = 158;\nUPDATE test SET num = 477 WHERE id = 159;\nUPDATE test SET num = 480 WHERE id = 160;\nUPDATE test SET num = 483 WHERE id = 161;\nUPDATE test SET num = 486 WHERE id = 162;\nUPDATE test SET num = 489 WHERE id = 163;\nUPDATE test SET num = 492 WHERE id = 164;\nUPDATE test SET num = 495 WHERE id = 165;\nUPDATE test SET num = 498 WHERE id = 166;\nUPDATE test SET num = 501 WHERE id = 167;\nUPDATE test SET num = 504 WHERE id = 168;\nUPDATE test SET num = 507 WHERE id = 169;\nUPDATE test SET num = 510 WHERE id = 170;\nUPDATE test SET num = 513 WHERE id = 171;\nUPDATE test SET num = 516 WHERE id = 172;\nUPDATE test SET num = 519 WHERE id = 173;\nUPDATE test SET num = 522 WHERE id = 174;\nUPDATE test SET num = 525 WHERE id = 175;\nUPDATE test SET num = 528 WHERE id = 176;\nUPDATE test SET num = 531 WHERE id = 177;\nUPDATE test SET num = 534 WHERE id = 178;\nUPDATE test SET num = 537 WHERE id = 179;\nUPDATE test SET num = 540 WHERE id = 180;\nUPDATE test SET num = 543 WHERE id = 181;\nUPDATE test SET num = 546 WHERE id = 182;\nUPDATE test SET num = 549 WHERE id = 183;\nUPDATE test SET num = 552 WHERE id = 184;\nUPDATE test SET num = 555 WHERE id = 185;\nUPDATE test SET num = 558 WHERE id = 186;\nUPDATE test SET num = 561 WHERE id = 187;\nUPDATE test SET num = 564 WHERE id = 188;\nUPDATE test SET num = 567 WHERE id = 189;\nUPDATE test SET num = 570 WHERE id = 190;\nUPDATE test SET num = 573 WHERE id = 191;\nUPDATE test SET num = 576 WHERE id = 192;\nUPDATE test SET num = 579 WHERE id = 193;\nUPDATE test SET num = 582 WHERE id = 194;\nUPDATE test SET num = 585 WHERE id = 195;\nUPDATE test SET num = 588 WHERE id = 196;\nUPDATE test SET num = 591 WHERE id = 197;\nUPDATE test SET num = 594 WHERE id = 198;\nUPDATE test SET num = 597 WHERE id = 199;\nUPDATE test SET num = 600 WHERE id = 200;\nUPDATE test SET num = 603 WHERE id = 201;\nUPDATE test SET num = 606 WHERE id = 202;\nUPDATE test SET num = 609 WHERE id = 203;\nUPDATE test SET num = 
612 WHERE id = 204;\nUPDATE test SET num = 615 WHERE id = 205;\nUPDATE test SET num = 618 WHERE id = 206;\nUPDATE test SET num = 621 WHERE id = 207;\nUPDATE test SET num = 624 WHERE id = 208;\nUPDATE test SET num = 627 WHERE id = 209;\nUPDATE test SET num = 630 WHERE id = 210;\nUPDATE test SET num = 633 WHERE id = 211;\nUPDATE test SET num = 636 WHERE id = 212;\nUPDATE test SET num = 639 WHERE id = 213;\nUPDATE test SET num = 642 WHERE id = 214;\nUPDATE test SET num = 645 WHERE id = 215;\nUPDATE test SET num = 648 WHERE id = 216;\nUPDATE test SET num = 651 WHERE id = 217;\nUPDATE test SET num = 654 WHERE id = 218;\nUPDATE test SET num = 657 WHERE id = 219;\nUPDATE test SET num = 660 WHERE id = 220;\nUPDATE test SET num = 663 WHERE id = 221;\nUPDATE test SET num = 666 WHERE id = 222;\nUPDATE test SET num = 669 WHERE id = 223;\nUPDATE test SET num = 672 WHERE id = 224;\nUPDATE test SET num = 675 WHERE id = 225;\nUPDATE test SET num = 678 WHERE id = 226;\nUPDATE test SET num = 681 WHERE id = 227;\nUPDATE test SET num = 684 WHERE id = 228;\nUPDATE test SET num = 687 WHERE id = 229;\nUPDATE test SET num = 690 WHERE id = 230;\nUPDATE test SET num = 693 WHERE id = 231;\nUPDATE test SET num = 696 WHERE id = 232;\nUPDATE test SET num = 699 WHERE id = 233;\nUPDATE test SET num = 702 WHERE id = 234;\nUPDATE test SET num = 705 WHERE id = 235;\nUPDATE test SET num = 708 WHERE id = 236;\nUPDATE test SET num = 711 WHERE id = 237;\nUPDATE test SET num = 714 WHERE id = 238;\nUPDATE test SET num = 717 WHERE id = 239;\nUPDATE test SET num = 720 WHERE id = 240;\nUPDATE test SET num = 723 WHERE id = 241;\nUPDATE test SET num = 726 WHERE id = 242;\nUPDATE test SET num = 729 WHERE id = 243;\nUPDATE test SET num = 732 WHERE id = 244;\nUPDATE test SET num = 735 WHERE id = 245;\nUPDATE test SET num = 738 WHERE id = 246;\nUPDATE test SET num = 741 WHERE id = 247;\nUPDATE test SET num = 744 WHERE id = 248;\nUPDATE test SET num = 747 WHERE id = 249;\nUPDATE test SET num = 750 WHERE id = 250;\nUPDATE test SET num = 753 WHERE id = 251;\nUPDATE test SET num = 756 WHERE id = 252;\nUPDATE test SET num = 759 WHERE id = 253;\nUPDATE test SET num = 762 WHERE id = 254;\nUPDATE test SET num = 765 WHERE id = 255;\nUPDATE test SET num = 768 WHERE id = 256;\nUPDATE test SET num = 771 WHERE id = 257;\nUPDATE test SET num = 774 WHERE id = 258;\nUPDATE test SET num = 777 WHERE id = 259;\nUPDATE test SET num = 780 WHERE id = 260;\nUPDATE test SET num = 783 WHERE id = 261;\nUPDATE test SET num = 786 WHERE id = 262;\nUPDATE test SET num = 789 WHERE id = 263;\nUPDATE test SET num = 792 WHERE id = 264;\nUPDATE test SET num = 795 WHERE id = 265;\nUPDATE test SET num = 798 WHERE id = 266;\nUPDATE test SET num = 801 WHERE id = 267;\nUPDATE test SET num = 804 WHERE id = 268;\nUPDATE test SET num = 807 WHERE id = 269;\nUPDATE test SET num = 810 WHERE id = 270;\nUPDATE test SET num = 813 WHERE id = 271;\nUPDATE test SET num = 816 WHERE id = 272;\nUPDATE test SET num = 819 WHERE id = 273;\nUPDATE test SET num = 822 WHERE id = 274;\nUPDATE test SET num = 825 WHERE id = 275;\nUPDATE test SET num = 828 WHERE id = 276;\nUPDATE test SET num = 831 WHERE id = 277;\nUPDATE test SET num = 834 WHERE id = 278;\nUPDATE test SET num = 837 WHERE id = 279;\nUPDATE test SET num = 840 WHERE id = 280;\nUPDATE test SET num = 843 WHERE id = 281;\nUPDATE test SET num = 846 WHERE id = 282;\nUPDATE test SET num = 849 WHERE id = 283;\nUPDATE test SET num = 852 WHERE id = 284;\nUPDATE test SET num = 855 WHERE id = 285;\nUPDATE test SET num = 858 WHERE id = 286;\nUPDATE 
test SET num = 861 WHERE id = 287;\nUPDATE test SET num = 864 WHERE id = 288;\nUPDATE test SET num = 867 WHERE id = 289;\nUPDATE test SET num = 870 WHERE id = 290;\nUPDATE test SET num = 873 WHERE id = 291;\nUPDATE test SET num = 876 WHERE id = 292;\nUPDATE test SET num = 879 WHERE id = 293;\nUPDATE test SET num = 882 WHERE id = 294;\nUPDATE test SET num = 885 WHERE id = 295;\nUPDATE test SET num = 888 WHERE id = 296;\nUPDATE test SET num = 891 WHERE id = 297;\nUPDATE test SET num = 894 WHERE id = 298;\nUPDATE test SET num = 897 WHERE id = 299;\nUPDATE test SET num = 900 WHERE id = 300;\nUPDATE test SET num = 903 WHERE id = 301;\nUPDATE test SET num = 906 WHERE id = 302;\nUPDATE test SET num = 909 WHERE id = 303;\nUPDATE test SET num = 912 WHERE id = 304;\nUPDATE test SET num = 915 WHERE id = 305;\nUPDATE test SET num = 918 WHERE id = 306;\nUPDATE test SET num = 921 WHERE id = 307;\nUPDATE test SET num = 924 WHERE id = 308;\nUPDATE test SET num = 927 WHERE id = 309;\nUPDATE test SET num = 930 WHERE id = 310;\nUPDATE test SET num = 933 WHERE id = 311;\nUPDATE test SET num = 936 WHERE id = 312;\nUPDATE test SET num = 939 WHERE id = 313;\nUPDATE test SET num = 942 WHERE id = 314;\nUPDATE test SET num = 945 WHERE id = 315;\nUPDATE test SET num = 948 WHERE id = 316;\nUPDATE test SET num = 951 WHERE id = 317;\nUPDATE test SET num = 954 WHERE id = 318;\nUPDATE test SET num = 957 WHERE id = 319;\nUPDATE test SET num = 960 WHERE id = 320;\nUPDATE test SET num = 963 WHERE id = 321;\nUPDATE test SET num = 966 WHERE id = 322;\nUPDATE test SET num = 969 WHERE id = 323;\nUPDATE test SET num = 972 WHERE id = 324;\nUPDATE test SET num = 975 WHERE id = 325;\nUPDATE test SET num = 978 WHERE id = 326;\nUPDATE test SET num = 981 WHERE id = 327;\nUPDATE test SET num = 984 WHERE id = 328;\nUPDATE test SET num = 987 WHERE id = 329;\nUPDATE test SET num = 990 WHERE id = 330;\nUPDATE test SET num = 993 WHERE id = 331;\nUPDATE test SET num = 996 WHERE id = 332;\nUPDATE test SET num = 999 WHERE id = 333;\nUPDATE test SET num = 1002 WHERE id = 334;\nUPDATE test SET num = 1005 WHERE id = 335;\nUPDATE test SET num = 1008 WHERE id = 336;\nUPDATE test SET num = 1011 WHERE id = 337;\nUPDATE test SET num = 1014 WHERE id = 338;\nUPDATE test SET num = 1017 WHERE id = 339;\nUPDATE test SET num = 1020 WHERE id = 340;\nUPDATE test SET num = 1023 WHERE id = 341;\nUPDATE test SET num = 1026 WHERE id = 342;\nUPDATE test SET num = 1029 WHERE id = 343;\nUPDATE test SET num = 1032 WHERE id = 344;\nUPDATE test SET num = 1035 WHERE id = 345;\nUPDATE test SET num = 1038 WHERE id = 346;\nUPDATE test SET num = 1041 WHERE id = 347;\nUPDATE test SET num = 1044 WHERE id = 348;\nUPDATE test SET num = 1047 WHERE id = 349;\nUPDATE test SET num = 1050 WHERE id = 350;\nUPDATE test SET num = 1053 WHERE id = 351;\nUPDATE test SET num = 1056 WHERE id = 352;\nUPDATE test SET num = 1059 WHERE id = 353;\nUPDATE test SET num = 1062 WHERE id = 354;\nUPDATE test SET num = 1065 WHERE id = 355;\nUPDATE test SET num = 1068 WHERE id = 356;\nUPDATE test SET num = 1071 WHERE id = 357;\nUPDATE test SET num = 1074 WHERE id = 358;\nUPDATE test SET num = 1077 WHERE id = 359;\nUPDATE test SET num = 1080 WHERE id = 360;\nUPDATE test SET num = 1083 WHERE id = 361;\nUPDATE test SET num = 1086 WHERE id = 362;\nUPDATE test SET num = 1089 WHERE id = 363;\nUPDATE test SET num = 1092 WHERE id = 364;\nUPDATE test SET num = 1095 WHERE id = 365;\nUPDATE test SET num = 1098 WHERE id = 366;\nUPDATE test SET num = 1101 WHERE id = 367;\nUPDATE test SET num = 1104 WHERE id = 
368;\nUPDATE test SET num = 1107 WHERE id = 369;\nUPDATE test SET num = 1110 WHERE id = 370;\nUPDATE test SET num = 1113 WHERE id = 371;\nUPDATE test SET num = 1116 WHERE id = 372;\nUPDATE test SET num = 1119 WHERE id = 373;\nUPDATE test SET num = 1122 WHERE id = 374;\nUPDATE test SET num = 1125 WHERE id = 375;\nUPDATE test SET num = 1128 WHERE id = 376;\nUPDATE test SET num = 1131 WHERE id = 377;\nUPDATE test SET num = 1134 WHERE id = 378;\nUPDATE test SET num = 1137 WHERE id = 379;\nUPDATE test SET num = 1140 WHERE id = 380;\nUPDATE test SET num = 1143 WHERE id = 381;\nUPDATE test SET num = 1146 WHERE id = 382;\nUPDATE test SET num = 1149 WHERE id = 383;\nUPDATE test SET num = 1152 WHERE id = 384;\nUPDATE test SET num = 1155 WHERE id = 385;\nUPDATE test SET num = 1158 WHERE id = 386;\nUPDATE test SET num = 1161 WHERE id = 387;\nUPDATE test SET num = 1164 WHERE id = 388;\nUPDATE test SET num = 1167 WHERE id = 389;\nUPDATE test SET num = 1170 WHERE id = 390;\nUPDATE test SET num = 1173 WHERE id = 391;\nUPDATE test SET num = 1176 WHERE id = 392;\nUPDATE test SET num = 1179 WHERE id = 393;\nUPDATE test SET num = 1182 WHERE id = 394;\nUPDATE test SET num = 1185 WHERE id = 395;\nUPDATE test SET num = 1188 WHERE id = 396;\nUPDATE test SET num = 1191 WHERE id = 397;\nUPDATE test SET num = 1194 WHERE id = 398;\nUPDATE test SET num = 1197 WHERE id = 399;\nUPDATE test SET num = 1200 WHERE id = 400;\nUPDATE test SET num = 1203 WHERE id = 401;\nUPDATE test SET num = 1206 WHERE id = 402;\nUPDATE test SET num = 1209 WHERE id = 403;\nUPDATE test SET num = 1212 WHERE id = 404;\nUPDATE test SET num = 1215 WHERE id = 405;\nUPDATE test SET num = 1218 WHERE id = 406;\nUPDATE test SET num = 1221 WHERE id = 407;\nUPDATE test SET num = 1224 WHERE id = 408;\nUPDATE test SET num = 1227 WHERE id = 409;\nUPDATE test SET num = 1230 WHERE id = 410;\nUPDATE test SET num = 1233 WHERE id = 411;\nUPDATE test SET num = 1236 WHERE id = 412;\nUPDATE test SET num = 1239 WHERE id = 413;\nUPDATE test SET num = 1242 WHERE id = 414;\nUPDATE test SET num = 1245 WHERE id = 415;\nUPDATE test SET num = 1248 WHERE id = 416;\nUPDATE test SET num = 1251 WHERE id = 417;\nUPDATE test SET num = 1254 WHERE id = 418;\nUPDATE test SET num = 1257 WHERE id = 419;\nUPDATE test SET num = 1260 WHERE id = 420;\nUPDATE test SET num = 1263 WHERE id = 421;\nUPDATE test SET num = 1266 WHERE id = 422;\nUPDATE test SET num = 1269 WHERE id = 423;\nUPDATE test SET num = 1272 WHERE id = 424;\nUPDATE test SET num = 1275 WHERE id = 425;\nUPDATE test SET num = 1278 WHERE id = 426;\nUPDATE test SET num = 1281 WHERE id = 427;\nUPDATE test SET num = 1284 WHERE id = 428;\nUPDATE test SET num = 1287 WHERE id = 429;\nUPDATE test SET num = 1290 WHERE id = 430;\nUPDATE test SET num = 1293 WHERE id = 431;\nUPDATE test SET num = 1296 WHERE id = 432;\nUPDATE test SET num = 1299 WHERE id = 433;\nUPDATE test SET num = 1302 WHERE id = 434;\nUPDATE test SET num = 1305 WHERE id = 435;\nUPDATE test SET num = 1308 WHERE id = 436;\nUPDATE test SET num = 1311 WHERE id = 437;\nUPDATE test SET num = 1314 WHERE id = 438;\nUPDATE test SET num = 1317 WHERE id = 439;\nUPDATE test SET num = 1320 WHERE id = 440;\nUPDATE test SET num = 1323 WHERE id = 441;\nUPDATE test SET num = 1326 WHERE id = 442;\nUPDATE test SET num = 1329 WHERE id = 443;\nUPDATE test SET num = 1332 WHERE id = 444;\nUPDATE test SET num = 1335 WHERE id = 445;\nUPDATE test SET num = 1338 WHERE id = 446;\nUPDATE test SET num = 1341 WHERE id = 447;\nUPDATE test SET num = 1344 WHERE id = 448;\nUPDATE test SET num = 1347 
WHERE id = 449;\nUPDATE test SET num = 1350 WHERE id = 450;\nUPDATE test SET num = 1353 WHERE id = 451;\nUPDATE test SET num = 1356 WHERE id = 452;\nUPDATE test SET num = 1359 WHERE id = 453;\nUPDATE test SET num = 1362 WHERE id = 454;\nUPDATE test SET num = 1365 WHERE id = 455;\nUPDATE test SET num = 1368 WHERE id = 456;\nUPDATE test SET num = 1371 WHERE id = 457;\nUPDATE test SET num = 1374 WHERE id = 458;\nUPDATE test SET num = 1377 WHERE id = 459;\nUPDATE test SET num = 1380 WHERE id = 460;\nUPDATE test SET num = 1383 WHERE id = 461;\nUPDATE test SET num = 1386 WHERE id = 462;\nUPDATE test SET num = 1389 WHERE id = 463;\nUPDATE test SET num = 1392 WHERE id = 464;\nUPDATE test SET num = 1395 WHERE id = 465;\nUPDATE test SET num = 1398 WHERE id = 466;\nUPDATE test SET num = 1401 WHERE id = 467;\nUPDATE test SET num = 1404 WHERE id = 468;\nUPDATE test SET num = 1407 WHERE id = 469;\nUPDATE test SET num = 1410 WHERE id = 470;\nUPDATE test SET num = 1413 WHERE id = 471;\nUPDATE test SET num = 1416 WHERE id = 472;\nUPDATE test SET num = 1419 WHERE id = 473;\nUPDATE test SET num = 1422 WHERE id = 474;\nUPDATE test SET num = 1425 WHERE id = 475;\nUPDATE test SET num = 1428 WHERE id = 476;\nUPDATE test SET num = 1431 WHERE id = 477;\nUPDATE test SET num = 1434 WHERE id = 478;\nUPDATE test SET num = 1437 WHERE id = 479;\nUPDATE test SET num = 1440 WHERE id = 480;\nUPDATE test SET num = 1443 WHERE id = 481;\nUPDATE test SET num = 1446 WHERE id = 482;\nUPDATE test SET num = 1449 WHERE id = 483;\nUPDATE test SET num = 1452 WHERE id = 484;\nUPDATE test SET num = 1455 WHERE id = 485;\nUPDATE test SET num = 1458 WHERE id = 486;\nUPDATE test SET num = 1461 WHERE id = 487;\nUPDATE test SET num = 1464 WHERE id = 488;\nUPDATE test SET num = 1467 WHERE id = 489;\nUPDATE test SET num = 1470 WHERE id = 490;\nUPDATE test SET num = 1473 WHERE id = 491;\nUPDATE test SET num = 1476 WHERE id = 492;\nUPDATE test SET num = 1479 WHERE id = 493;\nUPDATE test SET num = 1482 WHERE id = 494;\nUPDATE test SET num = 1485 WHERE id = 495;\nUPDATE test SET num = 1488 WHERE id = 496;\nUPDATE test SET num = 1491 WHERE id = 497;\nUPDATE test SET num = 1494 WHERE id = 498;\nUPDATE test SET num = 1497 WHERE id = 499;\nUPDATE test SET num = 1500 WHERE id = 500;\nUPDATE test SET num = 1503 WHERE id = 501;\nUPDATE test SET num = 1506 WHERE id = 502;\nUPDATE test SET num = 1509 WHERE id = 503;\nUPDATE test SET num = 1512 WHERE id = 504;\nUPDATE test SET num = 1515 WHERE id = 505;\nUPDATE test SET num = 1518 WHERE id = 506;\nUPDATE test SET num = 1521 WHERE id = 507;\nUPDATE test SET num = 1524 WHERE id = 508;\nUPDATE test SET num = 1527 WHERE id = 509;\nUPDATE test SET num = 1530 WHERE id = 510;\nUPDATE test SET num = 1533 WHERE id = 511;\nUPDATE test SET num = 1536 WHERE id = 512;\nUPDATE test SET num = 1539 WHERE id = 513;\nUPDATE test SET num = 1542 WHERE id = 514;\nUPDATE test SET num = 1545 WHERE id = 515;\nUPDATE test SET num = 1548 WHERE id = 516;\nUPDATE test SET num = 1551 WHERE id = 517;\nUPDATE test SET num = 1554 WHERE id = 518;\nUPDATE test SET num = 1557 WHERE id = 519;\nUPDATE test SET num = 1560 WHERE id = 520;\nUPDATE test SET num = 1563 WHERE id = 521;\nUPDATE test SET num = 1566 WHERE id = 522;\nUPDATE test SET num = 1569 WHERE id = 523;\nUPDATE test SET num = 1572 WHERE id = 524;\nUPDATE test SET num = 1575 WHERE id = 525;\nUPDATE test SET num = 1578 WHERE id = 526;\nUPDATE test SET num = 1581 WHERE id = 527;\nUPDATE test SET num = 1584 WHERE id = 528;\nUPDATE test SET num = 1587 WHERE id = 529;\nUPDATE test SET 
num = 1590 WHERE id = 530;\nUPDATE test SET num = 1593 WHERE id = 531;\nUPDATE test SET num = 1596 WHERE id = 532;\nUPDATE test SET num = 1599 WHERE id = 533;\nUPDATE test SET num = 1602 WHERE id = 534;\nUPDATE test SET num = 1605 WHERE id = 535;\nUPDATE test SET num = 1608 WHERE id = 536;\nUPDATE test SET num = 1611 WHERE id = 537;\nUPDATE test SET num = 1614 WHERE id = 538;\nUPDATE test SET num = 1617 WHERE id = 539;\nUPDATE test SET num = 1620 WHERE id = 540;\nUPDATE test SET num = 1623 WHERE id = 541;\nUPDATE test SET num = 1626 WHERE id = 542;\nUPDATE test SET num = 1629 WHERE id = 543;\nUPDATE test SET num = 1632 WHERE id = 544;\nUPDATE test SET num = 1635 WHERE id = 545;\nUPDATE test SET num = 1638 WHERE id = 546;\nUPDATE test SET num = 1641 WHERE id = 547;\nUPDATE test SET num = 1644 WHERE id = 548;\nUPDATE test SET num = 1647 WHERE id = 549;\nUPDATE test SET num = 1650 WHERE id = 550;\nUPDATE test SET num = 1653 WHERE id = 551;\nUPDATE test SET num = 1656 WHERE id = 552;\nUPDATE test SET num = 1659 WHERE id = 553;\nUPDATE test SET num = 1662 WHERE id = 554;\nUPDATE test SET num = 1665 WHERE id = 555;\nUPDATE test SET num = 1668 WHERE id = 556;\nUPDATE test SET num = 1671 WHERE id = 557;\nUPDATE test SET num = 1674 WHERE id = 558;\nUPDATE test SET num = 1677 WHERE id = 559;\nUPDATE test SET num = 1680 WHERE id = 560;\nUPDATE test SET num = 1683 WHERE id = 561;\nUPDATE test SET num = 1686 WHERE id = 562;\nUPDATE test SET num = 1689 WHERE id = 563;\nUPDATE test SET num = 1692 WHERE id = 564;\nUPDATE test SET num = 1695 WHERE id = 565;\nUPDATE test SET num = 1698 WHERE id = 566;\nUPDATE test SET num = 1701 WHERE id = 567;\nUPDATE test SET num = 1704 WHERE id = 568;\nUPDATE test SET num = 1707 WHERE id = 569;\nUPDATE test SET num = 1710 WHERE id = 570;\nUPDATE test SET num = 1713 WHERE id = 571;\nUPDATE test SET num = 1716 WHERE id = 572;\nUPDATE test SET num = 1719 WHERE id = 573;\nUPDATE test SET num = 1722 WHERE id = 574;\nUPDATE test SET num = 1725 WHERE id = 575;\nUPDATE test SET num = 1728 WHERE id = 576;\nUPDATE test SET num = 1731 WHERE id = 577;\nUPDATE test SET num = 1734 WHERE id = 578;\nUPDATE test SET num = 1737 WHERE id = 579;\nUPDATE test SET num = 1740 WHERE id = 580;\nUPDATE test SET num = 1743 WHERE id = 581;\nUPDATE test SET num = 1746 WHERE id = 582;\nUPDATE test SET num = 1749 WHERE id = 583;\nUPDATE test SET num = 1752 WHERE id = 584;\nUPDATE test SET num = 1755 WHERE id = 585;\nUPDATE test SET num = 1758 WHERE id = 586;\nUPDATE test SET num = 1761 WHERE id = 587;\nUPDATE test SET num = 1764 WHERE id = 588;\nUPDATE test SET num = 1767 WHERE id = 589;\nUPDATE test SET num = 1770 WHERE id = 590;\nUPDATE test SET num = 1773 WHERE id = 591;\nUPDATE test SET num = 1776 WHERE id = 592;\nUPDATE test SET num = 1779 WHERE id = 593;\nUPDATE test SET num = 1782 WHERE id = 594;\nUPDATE test SET num = 1785 WHERE id = 595;\nUPDATE test SET num = 1788 WHERE id = 596;\nUPDATE test SET num = 1791 WHERE id = 597;\nUPDATE test SET num = 1794 WHERE id = 598;\nUPDATE test SET num = 1797 WHERE id = 599;\nUPDATE test SET num = 1800 WHERE id = 600;\nUPDATE test SET num = 1803 WHERE id = 601;\nUPDATE test SET num = 1806 WHERE id = 602;\nUPDATE test SET num = 1809 WHERE id = 603;\nUPDATE test SET num = 1812 WHERE id = 604;\nUPDATE test SET num = 1815 WHERE id = 605;\nUPDATE test SET num = 1818 WHERE id = 606;\nUPDATE test SET num = 1821 WHERE id = 607;\nUPDATE test SET num = 1824 WHERE id = 608;\nUPDATE test SET num = 1827 WHERE id = 609;\nUPDATE test SET num = 1830 WHERE id = 610;\nUPDATE 
test SET num = 1833 WHERE id = 611;\nUPDATE test SET num = 1836 WHERE id = 612;\nUPDATE test SET num = 1839 WHERE id = 613;\nUPDATE test SET num = 1842 WHERE id = 614;\nUPDATE test SET num = 1845 WHERE id = 615;\nUPDATE test SET num = 1848 WHERE id = 616;\nUPDATE test SET num = 1851 WHERE id = 617;\nUPDATE test SET num = 1854 WHERE id = 618;\nUPDATE test SET num = 1857 WHERE id = 619;\nUPDATE test SET num = 1860 WHERE id = 620;\nUPDATE test SET num = 1863 WHERE id = 621;\nUPDATE test SET num = 1866 WHERE id = 622;\nUPDATE test SET num = 1869 WHERE id = 623;\nUPDATE test SET num = 1872 WHERE id = 624;\nUPDATE test SET num = 1875 WHERE id = 625;\nUPDATE test SET num = 1878 WHERE id = 626;\nUPDATE test SET num = 1881 WHERE id = 627;\nUPDATE test SET num = 1884 WHERE id = 628;\nUPDATE test SET num = 1887 WHERE id = 629;\nUPDATE test SET num = 1890 WHERE id = 630;\nUPDATE test SET num = 1893 WHERE id = 631;\nUPDATE test SET num = 1896 WHERE id = 632;\nUPDATE test SET num = 1899 WHERE id = 633;\nUPDATE test SET num = 1902 WHERE id = 634;\nUPDATE test SET num = 1905 WHERE id = 635;\nUPDATE test SET num = 1908 WHERE id = 636;\nUPDATE test SET num = 1911 WHERE id = 637;\nUPDATE test SET num = 1914 WHERE id = 638;\nUPDATE test SET num = 1917 WHERE id = 639;\nUPDATE test SET num = 1920 WHERE id = 640;\nUPDATE test SET num = 1923 WHERE id = 641;\nUPDATE test SET num = 1926 WHERE id = 642;\nUPDATE test SET num = 1929 WHERE id = 643;\nUPDATE test SET num = 1932 WHERE id = 644;\nUPDATE test SET num = 1935 WHERE id = 645;\nUPDATE test SET num = 1938 WHERE id = 646;\nUPDATE test SET num = 1941 WHERE id = 647;\nUPDATE test SET num = 1944 WHERE id = 648;\nUPDATE test SET num = 1947 WHERE id = 649;\nUPDATE test SET num = 1950 WHERE id = 650;\nUPDATE test SET num = 1953 WHERE id = 651;\nUPDATE test SET num = 1956 WHERE id = 652;\nUPDATE test SET num = 1959 WHERE id = 653;\nUPDATE test SET num = 1962 WHERE id = 654;\nUPDATE test SET num = 1965 WHERE id = 655;\nUPDATE test SET num = 1968 WHERE id = 656;\nUPDATE test SET num = 1971 WHERE id = 657;\nUPDATE test SET num = 1974 WHERE id = 658;\nUPDATE test SET num = 1977 WHERE id = 659;\nUPDATE test SET num = 1980 WHERE id = 660;\nUPDATE test SET num = 1983 WHERE id = 661;\nUPDATE test SET num = 1986 WHERE id = 662;\nUPDATE test SET num = 1989 WHERE id = 663;\nUPDATE test SET num = 1992 WHERE id = 664;\nUPDATE test SET num = 1995 WHERE id = 665;\nUPDATE test SET num = 1998 WHERE id = 666;\nUPDATE test SET num = 2001 WHERE id = 667;\nUPDATE test SET num = 2004 WHERE id = 668;\nUPDATE test SET num = 2007 WHERE id = 669;\nUPDATE test SET num = 2010 WHERE id = 670;\nUPDATE test SET num = 2013 WHERE id = 671;\nUPDATE test SET num = 2016 WHERE id = 672;\nUPDATE test SET num = 2019 WHERE id = 673;\nUPDATE test SET num = 2022 WHERE id = 674;\nUPDATE test SET num = 2025 WHERE id = 675;\nUPDATE test SET num = 2028 WHERE id = 676;\nUPDATE test SET num = 2031 WHERE id = 677;\nUPDATE test SET num = 2034 WHERE id = 678;\nUPDATE test SET num = 2037 WHERE id = 679;\nUPDATE test SET num = 2040 WHERE id = 680;\nUPDATE test SET num = 2043 WHERE id = 681;\nUPDATE test SET num = 2046 WHERE id = 682;\nUPDATE test SET num = 2049 WHERE id = 683;\nUPDATE test SET num = 2052 WHERE id = 684;\nUPDATE test SET num = 2055 WHERE id = 685;\nUPDATE test SET num = 2058 WHERE id = 686;\nUPDATE test SET num = 2061 WHERE id = 687;\nUPDATE test SET num = 2064 WHERE id = 688;\nUPDATE test SET num = 2067 WHERE id = 689;\nUPDATE test SET num = 2070 WHERE id = 690;\nUPDATE test SET num = 2073 WHERE id = 
691;\nUPDATE test SET num = 2076 WHERE id = 692;\nUPDATE test SET num = 2079 WHERE id = 693;\nUPDATE test SET num = 2082 WHERE id = 694;\nUPDATE test SET num = 2085 WHERE id = 695;\nUPDATE test SET num = 2088 WHERE id = 696;\nUPDATE test SET num = 2091 WHERE id = 697;\nUPDATE test SET num = 2094 WHERE id = 698;\nUPDATE test SET num = 2097 WHERE id = 699;\nUPDATE test SET num = 2100 WHERE id = 700;\nUPDATE test SET num = 2103 WHERE id = 701;\nUPDATE test SET num = 2106 WHERE id = 702;\nUPDATE test SET num = 2109 WHERE id = 703;\nUPDATE test SET num = 2112 WHERE id = 704;\nUPDATE test SET num = 2115 WHERE id = 705;\nUPDATE test SET num = 2118 WHERE id = 706;\nUPDATE test SET num = 2121 WHERE id = 707;\nUPDATE test SET num = 2124 WHERE id = 708;\nUPDATE test SET num = 2127 WHERE id = 709;\nUPDATE test SET num = 2130 WHERE id = 710;\nUPDATE test SET num = 2133 WHERE id = 711;\nUPDATE test SET num = 2136 WHERE id = 712;\nUPDATE test SET num = 2139 WHERE id = 713;\nUPDATE test SET num = 2142 WHERE id = 714;\nUPDATE test SET num = 2145 WHERE id = 715;\nUPDATE test SET num = 2148 WHERE id = 716;\nUPDATE test SET num = 2151 WHERE id = 717;\nUPDATE test SET num = 2154 WHERE id = 718;\nUPDATE test SET num = 2157 WHERE id = 719;\nUPDATE test SET num = 2160 WHERE id = 720;\nUPDATE test SET num = 2163 WHERE id = 721;\nUPDATE test SET num = 2166 WHERE id = 722;\nUPDATE test SET num = 2169 WHERE id = 723;\nUPDATE test SET num = 2172 WHERE id = 724;\nUPDATE test SET num = 2175 WHERE id = 725;\nUPDATE test SET num = 2178 WHERE id = 726;\nUPDATE test SET num = 2181 WHERE id = 727;\nUPDATE test SET num = 2184 WHERE id = 728;\nUPDATE test SET num = 2187 WHERE id = 729;\nUPDATE test SET num = 2190 WHERE id = 730;\nUPDATE test SET num = 2193 WHERE id = 731;\nUPDATE test SET num = 2196 WHERE id = 732;\nUPDATE test SET num = 2199 WHERE id = 733;\nUPDATE test SET num = 2202 WHERE id = 734;\nUPDATE test SET num = 2205 WHERE id = 735;\nUPDATE test SET num = 2208 WHERE id = 736;\nUPDATE test SET num = 2211 WHERE id = 737;\nUPDATE test SET num = 2214 WHERE id = 738;\nUPDATE test SET num = 2217 WHERE id = 739;\nUPDATE test SET num = 2220 WHERE id = 740;\nUPDATE test SET num = 2223 WHERE id = 741;\nUPDATE test SET num = 2226 WHERE id = 742;\nUPDATE test SET num = 2229 WHERE id = 743;\nUPDATE test SET num = 2232 WHERE id = 744;\nUPDATE test SET num = 2235 WHERE id = 745;\nUPDATE test SET num = 2238 WHERE id = 746;\nUPDATE test SET num = 2241 WHERE id = 747;\nUPDATE test SET num = 2244 WHERE id = 748;\nUPDATE test SET num = 2247 WHERE id = 749;\nUPDATE test SET num = 2250 WHERE id = 750;\nUPDATE test SET num = 2253 WHERE id = 751;\nUPDATE test SET num = 2256 WHERE id = 752;\nUPDATE test SET num = 2259 WHERE id = 753;\nUPDATE test SET num = 2262 WHERE id = 754;\nUPDATE test SET num = 2265 WHERE id = 755;\nUPDATE test SET num = 2268 WHERE id = 756;\nUPDATE test SET num = 2271 WHERE id = 757;\nUPDATE test SET num = 2274 WHERE id = 758;\nUPDATE test SET num = 2277 WHERE id = 759;\nUPDATE test SET num = 2280 WHERE id = 760;\nUPDATE test SET num = 2283 WHERE id = 761;\nUPDATE test SET num = 2286 WHERE id = 762;\nUPDATE test SET num = 2289 WHERE id = 763;\nUPDATE test SET num = 2292 WHERE id = 764;\nUPDATE test SET num = 2295 WHERE id = 765;\nUPDATE test SET num = 2298 WHERE id = 766;\nUPDATE test SET num = 2301 WHERE id = 767;\nUPDATE test SET num = 2304 WHERE id = 768;\nUPDATE test SET num = 2307 WHERE id = 769;\nUPDATE test SET num = 2310 WHERE id = 770;\nUPDATE test SET num = 2313 WHERE id = 771;\nUPDATE test SET num = 2316 
WHERE id = 772;\nUPDATE test SET num = 2319 WHERE id = 773;\nUPDATE test SET num = 2322 WHERE id = 774;\nUPDATE test SET num = 2325 WHERE id = 775;\nUPDATE test SET num = 2328 WHERE id = 776;\nUPDATE test SET num = 2331 WHERE id = 777;\nUPDATE test SET num = 2334 WHERE id = 778;\nUPDATE test SET num = 2337 WHERE id = 779;\nUPDATE test SET num = 2340 WHERE id = 780;\nUPDATE test SET num = 2343 WHERE id = 781;\nUPDATE test SET num = 2346 WHERE id = 782;\nUPDATE test SET num = 2349 WHERE id = 783;\nUPDATE test SET num = 2352 WHERE id = 784;\nUPDATE test SET num = 2355 WHERE id = 785;\nUPDATE test SET num = 2358 WHERE id = 786;\nUPDATE test SET num = 2361 WHERE id = 787;\nUPDATE test SET num = 2364 WHERE id = 788;\nUPDATE test SET num = 2367 WHERE id = 789;\nUPDATE test SET num = 2370 WHERE id = 790;\nUPDATE test SET num = 2373 WHERE id = 791;\nUPDATE test SET num = 2376 WHERE id = 792;\nUPDATE test SET num = 2379 WHERE id = 793;\nUPDATE test SET num = 2382 WHERE id = 794;\nUPDATE test SET num = 2385 WHERE id = 795;\nUPDATE test SET num = 2388 WHERE id = 796;\nUPDATE test SET num = 2391 WHERE id = 797;\nUPDATE test SET num = 2394 WHERE id = 798;\nUPDATE test SET num = 2397 WHERE id = 799;\nUPDATE test SET num = 2400 WHERE id = 800;\nUPDATE test SET num = 2403 WHERE id = 801;\nUPDATE test SET num = 2406 WHERE id = 802;\nUPDATE test SET num = 2409 WHERE id = 803;\nUPDATE test SET num = 2412 WHERE id = 804;\nUPDATE test SET num = 2415 WHERE id = 805;\nUPDATE test SET num = 2418 WHERE id = 806;\nUPDATE test SET num = 2421 WHERE id = 807;\nUPDATE test SET num = 2424 WHERE id = 808;\nUPDATE test SET num = 2427 WHERE id = 809;\nUPDATE test SET num = 2430 WHERE id = 810;\nUPDATE test SET num = 2433 WHERE id = 811;\nUPDATE test SET num = 2436 WHERE id = 812;\nUPDATE test SET num = 2439 WHERE id = 813;\nUPDATE test SET num = 2442 WHERE id = 814;\nUPDATE test SET num = 2445 WHERE id = 815;\nUPDATE test SET num = 2448 WHERE id = 816;\nUPDATE test SET num = 2451 WHERE id = 817;\nUPDATE test SET num = 2454 WHERE id = 818;\nUPDATE test SET num = 2457 WHERE id = 819;\nUPDATE test SET num = 2460 WHERE id = 820;\nUPDATE test SET num = 2463 WHERE id = 821;\nUPDATE test SET num = 2466 WHERE id = 822;\nUPDATE test SET num = 2469 WHERE id = 823;\nUPDATE test SET num = 2472 WHERE id = 824;\nUPDATE test SET num = 2475 WHERE id = 825;\nUPDATE test SET num = 2478 WHERE id = 826;\nUPDATE test SET num = 2481 WHERE id = 827;\nUPDATE test SET num = 2484 WHERE id = 828;\nUPDATE test SET num = 2487 WHERE id = 829;\nUPDATE test SET num = 2490 WHERE id = 830;\nUPDATE test SET num = 2493 WHERE id = 831;\nUPDATE test SET num = 2496 WHERE id = 832;\nUPDATE test SET num = 2499 WHERE id = 833;\nUPDATE test SET num = 2502 WHERE id = 834;\nUPDATE test SET num = 2505 WHERE id = 835;\nUPDATE test SET num = 2508 WHERE id = 836;\nUPDATE test SET num = 2511 WHERE id = 837;\nUPDATE test SET num = 2514 WHERE id = 838;\nUPDATE test SET num = 2517 WHERE id = 839;\nUPDATE test SET num = 2520 WHERE id = 840;\nUPDATE test SET num = 2523 WHERE id = 841;\nUPDATE test SET num = 2526 WHERE id = 842;\nUPDATE test SET num = 2529 WHERE id = 843;\nUPDATE test SET num = 2532 WHERE id = 844;\nUPDATE test SET num = 2535 WHERE id = 845;\nUPDATE test SET num = 2538 WHERE id = 846;\nUPDATE test SET num = 2541 WHERE id = 847;\nUPDATE test SET num = 2544 WHERE id = 848;\nUPDATE test SET num = 2547 WHERE id = 849;\nUPDATE test SET num = 2550 WHERE id = 850;\nUPDATE test SET num = 2553 WHERE id = 851;\nUPDATE test SET num = 2556 WHERE id = 852;\nUPDATE test SET 
num = 2559 WHERE id = 853;\nUPDATE test SET num = 2562 WHERE id = 854;\nUPDATE test SET num = 2565 WHERE id = 855;\nUPDATE test SET num = 2568 WHERE id = 856;\nUPDATE test SET num = 2571 WHERE id = 857;\nUPDATE test SET num = 2574 WHERE id = 858;\nUPDATE test SET num = 2577 WHERE id = 859;\nUPDATE test SET num = 2580 WHERE id = 860;\nUPDATE test SET num = 2583 WHERE id = 861;\nUPDATE test SET num = 2586 WHERE id = 862;\nUPDATE test SET num = 2589 WHERE id = 863;\nUPDATE test SET num = 2592 WHERE id = 864;\nUPDATE test SET num = 2595 WHERE id = 865;\nUPDATE test SET num = 2598 WHERE id = 866;\nUPDATE test SET num = 2601 WHERE id = 867;\nUPDATE test SET num = 2604 WHERE id = 868;\nUPDATE test SET num = 2607 WHERE id = 869;\nUPDATE test SET num = 2610 WHERE id = 870;\nUPDATE test SET num = 2613 WHERE id = 871;\nUPDATE test SET num = 2616 WHERE id = 872;\nUPDATE test SET num = 2619 WHERE id = 873;\nUPDATE test SET num = 2622 WHERE id = 874;\nUPDATE test SET num = 2625 WHERE id = 875;\nUPDATE test SET num = 2628 WHERE id = 876;\nUPDATE test SET num = 2631 WHERE id = 877;\nUPDATE test SET num = 2634 WHERE id = 878;\nUPDATE test SET num = 2637 WHERE id = 879;\nUPDATE test SET num = 2640 WHERE id = 880;\nUPDATE test SET num = 2643 WHERE id = 881;\nUPDATE test SET num = 2646 WHERE id = 882;\nUPDATE test SET num = 2649 WHERE id = 883;\nUPDATE test SET num = 2652 WHERE id = 884;\nUPDATE test SET num = 2655 WHERE id = 885;\nUPDATE test SET num = 2658 WHERE id = 886;\nUPDATE test SET num = 2661 WHERE id = 887;\nUPDATE test SET num = 2664 WHERE id = 888;\nUPDATE test SET num = 2667 WHERE id = 889;\nUPDATE test SET num = 2670 WHERE id = 890;\nUPDATE test SET num = 2673 WHERE id = 891;\nUPDATE test SET num = 2676 WHERE id = 892;\nUPDATE test SET num = 2679 WHERE id = 893;\nUPDATE test SET num = 2682 WHERE id = 894;\nUPDATE test SET num = 2685 WHERE id = 895;\nUPDATE test SET num = 2688 WHERE id = 896;\nUPDATE test SET num = 2691 WHERE id = 897;\nUPDATE test SET num = 2694 WHERE id = 898;\nUPDATE test SET num = 2697 WHERE id = 899;\nUPDATE test SET num = 2700 WHERE id = 900;\nUPDATE test SET num = 2703 WHERE id = 901;\nUPDATE test SET num = 2706 WHERE id = 902;\nUPDATE test SET num = 2709 WHERE id = 903;\nUPDATE test SET num = 2712 WHERE id = 904;\nUPDATE test SET num = 2715 WHERE id = 905;\nUPDATE test SET num = 2718 WHERE id = 906;\nUPDATE test SET num = 2721 WHERE id = 907;\nUPDATE test SET num = 2724 WHERE id = 908;\nUPDATE test SET num = 2727 WHERE id = 909;\nUPDATE test SET num = 2730 WHERE id = 910;\nUPDATE test SET num = 2733 WHERE id = 911;\nUPDATE test SET num = 2736 WHERE id = 912;\nUPDATE test SET num = 2739 WHERE id = 913;\nUPDATE test SET num = 2742 WHERE id = 914;\nUPDATE test SET num = 2745 WHERE id = 915;\nUPDATE test SET num = 2748 WHERE id = 916;\nUPDATE test SET num = 2751 WHERE id = 917;\nUPDATE test SET num = 2754 WHERE id = 918;\nUPDATE test SET num = 2757 WHERE id = 919;\nUPDATE test SET num = 2760 WHERE id = 920;\nUPDATE test SET num = 2763 WHERE id = 921;\nUPDATE test SET num = 2766 WHERE id = 922;\nUPDATE test SET num = 2769 WHERE id = 923;\nUPDATE test SET num = 2772 WHERE id = 924;\nUPDATE test SET num = 2775 WHERE id = 925;\nUPDATE test SET num = 2778 WHERE id = 926;\nUPDATE test SET num = 2781 WHERE id = 927;\nUPDATE test SET num = 2784 WHERE id = 928;\nUPDATE test SET num = 2787 WHERE id = 929;\nUPDATE test SET num = 2790 WHERE id = 930;\nUPDATE test SET num = 2793 WHERE id = 931;\nUPDATE test SET num = 2796 WHERE id = 932;\nUPDATE test SET num = 2799 WHERE id = 933;\nUPDATE 
test SET num = 2802 WHERE id = 934;\nUPDATE test SET num = 2805 WHERE id = 935;\nUPDATE test SET num = 2808 WHERE id = 936;\nUPDATE test SET num = 2811 WHERE id = 937;\nUPDATE test SET num = 2814 WHERE id = 938;\nUPDATE test SET num = 2817 WHERE id = 939;\nUPDATE test SET num = 2820 WHERE id = 940;\nUPDATE test SET num = 2823 WHERE id = 941;\nUPDATE test SET num = 2826 WHERE id = 942;\nUPDATE test SET num = 2829 WHERE id = 943;\nUPDATE test SET num = 2832 WHERE id = 944;\nUPDATE test SET num = 2835 WHERE id = 945;\nUPDATE test SET num = 2838 WHERE id = 946;\nUPDATE test SET num = 2841 WHERE id = 947;\nUPDATE test SET num = 2844 WHERE id = 948;\nUPDATE test SET num = 2847 WHERE id = 949;\nUPDATE test SET num = 2850 WHERE id = 950;\nUPDATE test SET num = 2853 WHERE id = 951;\nUPDATE test SET num = 2856 WHERE id = 952;\nUPDATE test SET num = 2859 WHERE id = 953;\nUPDATE test SET num = 2862 WHERE id = 954;\nUPDATE test SET num = 2865 WHERE id = 955;\nUPDATE test SET num = 2868 WHERE id = 956;\nUPDATE test SET num = 2871 WHERE id = 957;\nUPDATE test SET num = 2874 WHERE id = 958;\nUPDATE test SET num = 2877 WHERE id = 959;\nUPDATE test SET num = 2880 WHERE id = 960;\nUPDATE test SET num = 2883 WHERE id = 961;\nUPDATE test SET num = 2886 WHERE id = 962;\nUPDATE test SET num = 2889 WHERE id = 963;\nUPDATE test SET num = 2892 WHERE id = 964;\nUPDATE test SET num = 2895 WHERE id = 965;\nUPDATE test SET num = 2898 WHERE id = 966;\nUPDATE test SET num = 2901 WHERE id = 967;\nUPDATE test SET num = 2904 WHERE id = 968;\nUPDATE test SET num = 2907 WHERE id = 969;\nUPDATE test SET num = 2910 WHERE id = 970;\nUPDATE test SET num = 2913 WHERE id = 971;\nUPDATE test SET num = 2916 WHERE id = 972;\nUPDATE test SET num = 2919 WHERE id = 973;\nUPDATE test SET num = 2922 WHERE id = 974;\nUPDATE test SET num = 2925 WHERE id = 975;\nUPDATE test SET num = 2928 WHERE id = 976;\nUPDATE test SET num = 2931 WHERE id = 977;\nUPDATE test SET num = 2934 WHERE id = 978;\nUPDATE test SET num = 2937 WHERE id = 979;\nUPDATE test SET num = 2940 WHERE id = 980;\nUPDATE test SET num = 2943 WHERE id = 981;\nUPDATE test SET num = 2946 WHERE id = 982;\nUPDATE test SET num = 2949 WHERE id = 983;\nUPDATE test SET num = 2952 WHERE id = 984;\nUPDATE test SET num = 2955 WHERE id = 985;\nUPDATE test SET num = 2958 WHERE id = 986;\nUPDATE test SET num = 2961 WHERE id = 987;\nUPDATE test SET num = 2964 WHERE id = 988;\nUPDATE test SET num = 2967 WHERE id = 989;\nUPDATE test SET num = 2970 WHERE id = 990;\nUPDATE test SET num = 2973 WHERE id = 991;\nUPDATE test SET num = 2976 WHERE id = 992;\nUPDATE test SET num = 2979 WHERE id = 993;\nUPDATE test SET num = 2982 WHERE id = 994;\nUPDATE test SET num = 2985 WHERE id = 995;\nUPDATE test SET num = 2988 WHERE id = 996;\nUPDATE test SET num = 2991 WHERE id = 997;\nUPDATE test SET num = 2994 WHERE id = 998;\nUPDATE test SET num = 2997 WHERE id = 999;\nSELECT * FROM test;\nDELETE FROM test WHERE id = 0;\nDELETE FROM test WHERE id = 1;\nDELETE FROM test WHERE id = 2;\nDELETE FROM test WHERE id = 3;\nDELETE FROM test WHERE id = 4;\nDELETE FROM test WHERE id = 5;\nDELETE FROM test WHERE id = 6;\nDELETE FROM test WHERE id = 7;\nDELETE FROM test WHERE id = 8;\nDELETE FROM test WHERE id = 9;\nDELETE FROM test WHERE id = 10;\nDELETE FROM test WHERE id = 11;\nDELETE FROM test WHERE id = 12;\nDELETE FROM test WHERE id = 13;\nDELETE FROM test WHERE id = 14;\nDELETE FROM test WHERE id = 15;\nDELETE FROM test WHERE id = 16;\nDELETE FROM test WHERE id = 17;\nDELETE FROM test WHERE id = 18;\nDELETE FROM test 
WHERE id = 19;\nDELETE FROM test WHERE id = 20;\nDELETE FROM test WHERE id = 21;\nDELETE FROM test WHERE id = 22;\nDELETE FROM test WHERE id = 23;\nDELETE FROM test WHERE id = 24;\nDELETE FROM test WHERE id = 25;\nDELETE FROM test WHERE id = 26;\nDELETE FROM test WHERE id = 27;\nDELETE FROM test WHERE id = 28;\nDELETE FROM test WHERE id = 29;\nDELETE FROM test WHERE id = 30;\nDELETE FROM test WHERE id = 31;\nDELETE FROM test WHERE id = 32;\nDELETE FROM test WHERE id = 33;\nDELETE FROM test WHERE id = 34;\nDELETE FROM test WHERE id = 35;\nDELETE FROM test WHERE id = 36;\nDELETE FROM test WHERE id = 37;\nDELETE FROM test WHERE id = 38;\nDELETE FROM test WHERE id = 39;\nDELETE FROM test WHERE id = 40;\nDELETE FROM test WHERE id = 41;\nDELETE FROM test WHERE id = 42;\nDELETE FROM test WHERE id = 43;\nDELETE FROM test WHERE id = 44;\nDELETE FROM test WHERE id = 45;\nDELETE FROM test WHERE id = 46;\nDELETE FROM test WHERE id = 47;\nDELETE FROM test WHERE id = 48;\nDELETE FROM test WHERE id = 49;\nDELETE FROM test WHERE id = 50;\nDELETE FROM test WHERE id = 51;\nDELETE FROM test WHERE id = 52;\nDELETE FROM test WHERE id = 53;\nDELETE FROM test WHERE id = 54;\nDELETE FROM test WHERE id = 55;\nDELETE FROM test WHERE id = 56;\nDELETE FROM test WHERE id = 57;\nDELETE FROM test WHERE id = 58;\nDELETE FROM test WHERE id = 59;\nDELETE FROM test WHERE id = 60;\nDELETE FROM test WHERE id = 61;\nDELETE FROM test WHERE id = 62;\nDELETE FROM test WHERE id = 63;\nDELETE FROM test WHERE id = 64;\nDELETE FROM test WHERE id = 65;\nDELETE FROM test WHERE id = 66;\nDELETE FROM test WHERE id = 67;\nDELETE FROM test WHERE id = 68;\nDELETE FROM test WHERE id = 69;\nDELETE FROM test WHERE id = 70;\nDELETE FROM test WHERE id = 71;\nDELETE FROM test WHERE id = 72;\nDELETE FROM test WHERE id = 73;\nDELETE FROM test WHERE id = 74;\nDELETE FROM test WHERE id = 75;\nDELETE FROM test WHERE id = 76;\nDELETE FROM test WHERE id = 77;\nDELETE FROM test WHERE id = 78;\nDELETE FROM test WHERE id = 79;\nDELETE FROM test WHERE id = 80;\nDELETE FROM test WHERE id = 81;\nDELETE FROM test WHERE id = 82;\nDELETE FROM test WHERE id = 83;\nDELETE FROM test WHERE id = 84;\nDELETE FROM test WHERE id = 85;\nDELETE FROM test WHERE id = 86;\nDELETE FROM test WHERE id = 87;\nDELETE FROM test WHERE id = 88;\nDELETE FROM test WHERE id = 89;\nDELETE FROM test WHERE id = 90;\nDELETE FROM test WHERE id = 91;\nDELETE FROM test WHERE id = 92;\nDELETE FROM test WHERE id = 93;\nDELETE FROM test WHERE id = 94;\nDELETE FROM test WHERE id = 95;\nDELETE FROM test WHERE id = 96;\nDELETE FROM test WHERE id = 97;\nDELETE FROM test WHERE id = 98;\nDELETE FROM test WHERE id = 99;\nDELETE FROM test WHERE id = 100;\nDELETE FROM test WHERE id = 101;\nDELETE FROM test WHERE id = 102;\nDELETE FROM test WHERE id = 103;\nDELETE FROM test WHERE id = 104;\nDELETE FROM test WHERE id = 105;\nDELETE FROM test WHERE id = 106;\nDELETE FROM test WHERE id = 107;\nDELETE FROM test WHERE id = 108;\nDELETE FROM test WHERE id = 109;\nDELETE FROM test WHERE id = 110;\nDELETE FROM test WHERE id = 111;\nDELETE FROM test WHERE id = 112;\nDELETE FROM test WHERE id = 113;\nDELETE FROM test WHERE id = 114;\nDELETE FROM test WHERE id = 115;\nDELETE FROM test WHERE id = 116;\nDELETE FROM test WHERE id = 117;\nDELETE FROM test WHERE id = 118;\nDELETE FROM test WHERE id = 119;\nDELETE FROM test WHERE id = 120;\nDELETE FROM test WHERE id = 121;\nDELETE FROM test WHERE id = 122;\nDELETE FROM test WHERE id = 123;\nDELETE FROM test WHERE id = 124;\nDELETE FROM test WHERE id = 125;\nDELETE FROM 
test WHERE id = 126;\nDELETE FROM test WHERE id = 127;\nDELETE FROM test WHERE id = 128;\nDELETE FROM test WHERE id = 129;\nDELETE FROM test WHERE id = 130;\nDELETE FROM test WHERE id = 131;\nDELETE FROM test WHERE id = 132;\nDELETE FROM test WHERE id = 133;\nDELETE FROM test WHERE id = 134;\nDELETE FROM test WHERE id = 135;\nDELETE FROM test WHERE id = 136;\nDELETE FROM test WHERE id = 137;\nDELETE FROM test WHERE id = 138;\nDELETE FROM test WHERE id = 139;\nDELETE FROM test WHERE id = 140;\nDELETE FROM test WHERE id = 141;\nDELETE FROM test WHERE id = 142;\nDELETE FROM test WHERE id = 143;\nDELETE FROM test WHERE id = 144;\nDELETE FROM test WHERE id = 145;\nDELETE FROM test WHERE id = 146;\nDELETE FROM test WHERE id = 147;\nDELETE FROM test WHERE id = 148;\nDELETE FROM test WHERE id = 149;\nDELETE FROM test WHERE id = 150;\nDELETE FROM test WHERE id = 151;\nDELETE FROM test WHERE id = 152;\nDELETE FROM test WHERE id = 153;\nDELETE FROM test WHERE id = 154;\nDELETE FROM test WHERE id = 155;\nDELETE FROM test WHERE id = 156;\nDELETE FROM test WHERE id = 157;\nDELETE FROM test WHERE id = 158;\nDELETE FROM test WHERE id = 159;\nDELETE FROM test WHERE id = 160;\nDELETE FROM test WHERE id = 161;\nDELETE FROM test WHERE id = 162;\nDELETE FROM test WHERE id = 163;\nDELETE FROM test WHERE id = 164;\nDELETE FROM test WHERE id = 165;\nDELETE FROM test WHERE id = 166;\nDELETE FROM test WHERE id = 167;\nDELETE FROM test WHERE id = 168;\nDELETE FROM test WHERE id = 169;\nDELETE FROM test WHERE id = 170;\nDELETE FROM test WHERE id = 171;\nDELETE FROM test WHERE id = 172;\nDELETE FROM test WHERE id = 173;\nDELETE FROM test WHERE id = 174;\nDELETE FROM test WHERE id = 175;\nDELETE FROM test WHERE id = 176;\nDELETE FROM test WHERE id = 177;\nDELETE FROM test WHERE id = 178;\nDELETE FROM test WHERE id = 179;\nDELETE FROM test WHERE id = 180;\nDELETE FROM test WHERE id = 181;\nDELETE FROM test WHERE id = 182;\nDELETE FROM test WHERE id = 183;\nDELETE FROM test WHERE id = 184;\nDELETE FROM test WHERE id = 185;\nDELETE FROM test WHERE id = 186;\nDELETE FROM test WHERE id = 187;\nDELETE FROM test WHERE id = 188;\nDELETE FROM test WHERE id = 189;\nDELETE FROM test WHERE id = 190;\nDELETE FROM test WHERE id = 191;\nDELETE FROM test WHERE id = 192;\nDELETE FROM test WHERE id = 193;\nDELETE FROM test WHERE id = 194;\nDELETE FROM test WHERE id = 195;\nDELETE FROM test WHERE id = 196;\nDELETE FROM test WHERE id = 197;\nDELETE FROM test WHERE id = 198;\nDELETE FROM test WHERE id = 199;\nDELETE FROM test WHERE id = 200;\nDELETE FROM test WHERE id = 201;\nDELETE FROM test WHERE id = 202;\nDELETE FROM test WHERE id = 203;\nDELETE FROM test WHERE id = 204;\nDELETE FROM test WHERE id = 205;\nDELETE FROM test WHERE id = 206;\nDELETE FROM test WHERE id = 207;\nDELETE FROM test WHERE id = 208;\nDELETE FROM test WHERE id = 209;\nDELETE FROM test WHERE id = 210;\nDELETE FROM test WHERE id = 211;\nDELETE FROM test WHERE id = 212;\nDELETE FROM test WHERE id = 213;\nDELETE FROM test WHERE id = 214;\nDELETE FROM test WHERE id = 215;\nDELETE FROM test WHERE id = 216;\nDELETE FROM test WHERE id = 217;\nDELETE FROM test WHERE id = 218;\nDELETE FROM test WHERE id = 219;\nDELETE FROM test WHERE id = 220;\nDELETE FROM test WHERE id = 221;\nDELETE FROM test WHERE id = 222;\nDELETE FROM test WHERE id = 223;\nDELETE FROM test WHERE id = 224;\nDELETE FROM test WHERE id = 225;\nDELETE FROM test WHERE id = 226;\nDELETE FROM test WHERE id = 227;\nDELETE FROM test WHERE id = 228;\nDELETE FROM test WHERE id = 229;\nDELETE FROM test WHERE id = 
230;\nDELETE FROM test WHERE id = 231;\nDELETE FROM test WHERE id = 232;\nDELETE FROM test WHERE id = 233;\nDELETE FROM test WHERE id = 234;\nDELETE FROM test WHERE id = 235;\nDELETE FROM test WHERE id = 236;\nDELETE FROM test WHERE id = 237;\nDELETE FROM test WHERE id = 238;\nDELETE FROM test WHERE id = 239;\nDELETE FROM test WHERE id = 240;\nDELETE FROM test WHERE id = 241;\nDELETE FROM test WHERE id = 242;\nDELETE FROM test WHERE id = 243;\nDELETE FROM test WHERE id = 244;\nDELETE FROM test WHERE id = 245;\nDELETE FROM test WHERE id = 246;\nDELETE FROM test WHERE id = 247;\nDELETE FROM test WHERE id = 248;\nDELETE FROM test WHERE id = 249;\nDELETE FROM test WHERE id = 250;\nDELETE FROM test WHERE id = 251;\nDELETE FROM test WHERE id = 252;\nDELETE FROM test WHERE id = 253;\nDELETE FROM test WHERE id = 254;\nDELETE FROM test WHERE id = 255;\nDELETE FROM test WHERE id = 256;\nDELETE FROM test WHERE id = 257;\nDELETE FROM test WHERE id = 258;\nDELETE FROM test WHERE id = 259;\nDELETE FROM test WHERE id = 260;\nDELETE FROM test WHERE id = 261;\nDELETE FROM test WHERE id = 262;\nDELETE FROM test WHERE id = 263;\nDELETE FROM test WHERE id = 264;\nDELETE FROM test WHERE id = 265;\nDELETE FROM test WHERE id = 266;\nDELETE FROM test WHERE id = 267;\nDELETE FROM test WHERE id = 268;\nDELETE FROM test WHERE id = 269;\nDELETE FROM test WHERE id = 270;\nDELETE FROM test WHERE id = 271;\nDELETE FROM test WHERE id = 272;\nDELETE FROM test WHERE id = 273;\nDELETE FROM test WHERE id = 274;\nDELETE FROM test WHERE id = 275;\nDELETE FROM test WHERE id = 276;\nDELETE FROM test WHERE id = 277;\nDELETE FROM test WHERE id = 278;\nDELETE FROM test WHERE id = 279;\nDELETE FROM test WHERE id = 280;\nDELETE FROM test WHERE id = 281;\nDELETE FROM test WHERE id = 282;\nDELETE FROM test WHERE id = 283;\nDELETE FROM test WHERE id = 284;\nDELETE FROM test WHERE id = 285;\nDELETE FROM test WHERE id = 286;\nDELETE FROM test WHERE id = 287;\nDELETE FROM test WHERE id = 288;\nDELETE FROM test WHERE id = 289;\nDELETE FROM test WHERE id = 290;\nDELETE FROM test WHERE id = 291;\nDELETE FROM test WHERE id = 292;\nDELETE FROM test WHERE id = 293;\nDELETE FROM test WHERE id = 294;\nDELETE FROM test WHERE id = 295;\nDELETE FROM test WHERE id = 296;\nDELETE FROM test WHERE id = 297;\nDELETE FROM test WHERE id = 298;\nDELETE FROM test WHERE id = 299;\nDELETE FROM test WHERE id = 300;\nDELETE FROM test WHERE id = 301;\nDELETE FROM test WHERE id = 302;\nDELETE FROM test WHERE id = 303;\nDELETE FROM test WHERE id = 304;\nDELETE FROM test WHERE id = 305;\nDELETE FROM test WHERE id = 306;\nDELETE FROM test WHERE id = 307;\nDELETE FROM test WHERE id = 308;\nDELETE FROM test WHERE id = 309;\nDELETE FROM test WHERE id = 310;\nDELETE FROM test WHERE id = 311;\nDELETE FROM test WHERE id = 312;\nDELETE FROM test WHERE id = 313;\nDELETE FROM test WHERE id = 314;\nDELETE FROM test WHERE id = 315;\nDELETE FROM test WHERE id = 316;\nDELETE FROM test WHERE id = 317;\nDELETE FROM test WHERE id = 318;\nDELETE FROM test WHERE id = 319;\nDELETE FROM test WHERE id = 320;\nDELETE FROM test WHERE id = 321;\nDELETE FROM test WHERE id = 322;\nDELETE FROM test WHERE id = 323;\nDELETE FROM test WHERE id = 324;\nDELETE FROM test WHERE id = 325;\nDELETE FROM test WHERE id = 326;\nDELETE FROM test WHERE id = 327;\nDELETE FROM test WHERE id = 328;\nDELETE FROM test WHERE id = 329;\nDELETE FROM test WHERE id = 330;\nDELETE FROM test WHERE id = 331;\nDELETE FROM test WHERE id = 332;\nDELETE FROM test WHERE id = 333;\nDELETE FROM test WHERE id = 334;\nDELETE FROM 
test WHERE id = 335;\nDELETE FROM test WHERE id = 336;\nDELETE FROM test WHERE id = 337;\nDELETE FROM test WHERE id = 338;\nDELETE FROM test WHERE id = 339;\nDELETE FROM test WHERE id = 340;\nDELETE FROM test WHERE id = 341;\nDELETE FROM test WHERE id = 342;\nDELETE FROM test WHERE id = 343;\nDELETE FROM test WHERE id = 344;\nDELETE FROM test WHERE id = 345;\nDELETE FROM test WHERE id = 346;\nDELETE FROM test WHERE id = 347;\nDELETE FROM test WHERE id = 348;\nDELETE FROM test WHERE id = 349;\nDELETE FROM test WHERE id = 350;\nDELETE FROM test WHERE id = 351;\nDELETE FROM test WHERE id = 352;\nDELETE FROM test WHERE id = 353;\nDELETE FROM test WHERE id = 354;\nDELETE FROM test WHERE id = 355;\nDELETE FROM test WHERE id = 356;\nDELETE FROM test WHERE id = 357;\nDELETE FROM test WHERE id = 358;\nDELETE FROM test WHERE id = 359;\nDELETE FROM test WHERE id = 360;\nDELETE FROM test WHERE id = 361;\nDELETE FROM test WHERE id = 362;\nDELETE FROM test WHERE id = 363;\nDELETE FROM test WHERE id = 364;\nDELETE FROM test WHERE id = 365;\nDELETE FROM test WHERE id = 366;\nDELETE FROM test WHERE id = 367;\nDELETE FROM test WHERE id = 368;\nDELETE FROM test WHERE id = 369;\nDELETE FROM test WHERE id = 370;\nDELETE FROM test WHERE id = 371;\nDELETE FROM test WHERE id = 372;\nDELETE FROM test WHERE id = 373;\nDELETE FROM test WHERE id = 374;\nDELETE FROM test WHERE id = 375;\nDELETE FROM test WHERE id = 376;\nDELETE FROM test WHERE id = 377;\nDELETE FROM test WHERE id = 378;\nDELETE FROM test WHERE id = 379;\nDELETE FROM test WHERE id = 380;\nDELETE FROM test WHERE id = 381;\nDELETE FROM test WHERE id = 382;\nDELETE FROM test WHERE id = 383;\nDELETE FROM test WHERE id = 384;\nDELETE FROM test WHERE id = 385;\nDELETE FROM test WHERE id = 386;\nDELETE FROM test WHERE id = 387;\nDELETE FROM test WHERE id = 388;\nDELETE FROM test WHERE id = 389;\nDELETE FROM test WHERE id = 390;\nDELETE FROM test WHERE id = 391;\nDELETE FROM test WHERE id = 392;\nDELETE FROM test WHERE id = 393;\nDELETE FROM test WHERE id = 394;\nDELETE FROM test WHERE id = 395;\nDELETE FROM test WHERE id = 396;\nDELETE FROM test WHERE id = 397;\nDELETE FROM test WHERE id = 398;\nDELETE FROM test WHERE id = 399;\nDELETE FROM test WHERE id = 400;\nDELETE FROM test WHERE id = 401;\nDELETE FROM test WHERE id = 402;\nDELETE FROM test WHERE id = 403;\nDELETE FROM test WHERE id = 404;\nDELETE FROM test WHERE id = 405;\nDELETE FROM test WHERE id = 406;\nDELETE FROM test WHERE id = 407;\nDELETE FROM test WHERE id = 408;\nDELETE FROM test WHERE id = 409;\nDELETE FROM test WHERE id = 410;\nDELETE FROM test WHERE id = 411;\nDELETE FROM test WHERE id = 412;\nDELETE FROM test WHERE id = 413;\nDELETE FROM test WHERE id = 414;\nDELETE FROM test WHERE id = 415;\nDELETE FROM test WHERE id = 416;\nDELETE FROM test WHERE id = 417;\nDELETE FROM test WHERE id = 418;\nDELETE FROM test WHERE id = 419;\nDELETE FROM test WHERE id = 420;\nDELETE FROM test WHERE id = 421;\nDELETE FROM test WHERE id = 422;\nDELETE FROM test WHERE id = 423;\nDELETE FROM test WHERE id = 424;\nDELETE FROM test WHERE id = 425;\nDELETE FROM test WHERE id = 426;\nDELETE FROM test WHERE id = 427;\nDELETE FROM test WHERE id = 428;\nDELETE FROM test WHERE id = 429;\nDELETE FROM test WHERE id = 430;\nDELETE FROM test WHERE id = 431;\nDELETE FROM test WHERE id = 432;\nDELETE FROM test WHERE id = 433;\nDELETE FROM test WHERE id = 434;\nDELETE FROM test WHERE id = 435;\nDELETE FROM test WHERE id = 436;\nDELETE FROM test WHERE id = 437;\nDELETE FROM test WHERE id = 438;\nDELETE FROM test WHERE id = 
439;\nDELETE FROM test WHERE id = 440;\nDELETE FROM test WHERE id = 441;\nDELETE FROM test WHERE id = 442;\nDELETE FROM test WHERE id = 443;\nDELETE FROM test WHERE id = 444;\nDELETE FROM test WHERE id = 445;\nDELETE FROM test WHERE id = 446;\nDELETE FROM test WHERE id = 447;\nDELETE FROM test WHERE id = 448;\nDELETE FROM test WHERE id = 449;\nDELETE FROM test WHERE id = 450;\nDELETE FROM test WHERE id = 451;\nDELETE FROM test WHERE id = 452;\nDELETE FROM test WHERE id = 453;\nDELETE FROM test WHERE id = 454;\nDELETE FROM test WHERE id = 455;\nDELETE FROM test WHERE id = 456;\nDELETE FROM test WHERE id = 457;\nDELETE FROM test WHERE id = 458;\nDELETE FROM test WHERE id = 459;\nDELETE FROM test WHERE id = 460;\nDELETE FROM test WHERE id = 461;\nDELETE FROM test WHERE id = 462;\nDELETE FROM test WHERE id = 463;\nDELETE FROM test WHERE id = 464;\nDELETE FROM test WHERE id = 465;\nDELETE FROM test WHERE id = 466;\nDELETE FROM test WHERE id = 467;\nDELETE FROM test WHERE id = 468;\nDELETE FROM test WHERE id = 469;\nDELETE FROM test WHERE id = 470;\nDELETE FROM test WHERE id = 471;\nDELETE FROM test WHERE id = 472;\nDELETE FROM test WHERE id = 473;\nDELETE FROM test WHERE id = 474;\nDELETE FROM test WHERE id = 475;\nDELETE FROM test WHERE id = 476;\nDELETE FROM test WHERE id = 477;\nDELETE FROM test WHERE id = 478;\nDELETE FROM test WHERE id = 479;\nDELETE FROM test WHERE id = 480;\nDELETE FROM test WHERE id = 481;\nDELETE FROM test WHERE id = 482;\nDELETE FROM test WHERE id = 483;\nDELETE FROM test WHERE id = 484;\nDELETE FROM test WHERE id = 485;\nDELETE FROM test WHERE id = 486;\nDELETE FROM test WHERE id = 487;\nDELETE FROM test WHERE id = 488;\nDELETE FROM test WHERE id = 489;\nDELETE FROM test WHERE id = 490;\nDELETE FROM test WHERE id = 491;\nDELETE FROM test WHERE id = 492;\nDELETE FROM test WHERE id = 493;\nDELETE FROM test WHERE id = 494;\nDELETE FROM test WHERE id = 495;\nDELETE FROM test WHERE id = 496;\nDELETE FROM test WHERE id = 497;\nDELETE FROM test WHERE id = 498;\nDELETE FROM test WHERE id = 499;\nDELETE FROM test WHERE id = 500;\nDELETE FROM test WHERE id = 501;\nDELETE FROM test WHERE id = 502;\nDELETE FROM test WHERE id = 503;\nDELETE FROM test WHERE id = 504;\nDELETE FROM test WHERE id = 505;\nDELETE FROM test WHERE id = 506;\nDELETE FROM test WHERE id = 507;\nDELETE FROM test WHERE id = 508;\nDELETE FROM test WHERE id = 509;\nDELETE FROM test WHERE id = 510;\nDELETE FROM test WHERE id = 511;\nDELETE FROM test WHERE id = 512;\nDELETE FROM test WHERE id = 513;\nDELETE FROM test WHERE id = 514;\nDELETE FROM test WHERE id = 515;\nDELETE FROM test WHERE id = 516;\nDELETE FROM test WHERE id = 517;\nDELETE FROM test WHERE id = 518;\nDELETE FROM test WHERE id = 519;\nDELETE FROM test WHERE id = 520;\nDELETE FROM test WHERE id = 521;\nDELETE FROM test WHERE id = 522;\nDELETE FROM test WHERE id = 523;\nDELETE FROM test WHERE id = 524;\nDELETE FROM test WHERE id = 525;\nDELETE FROM test WHERE id = 526;\nDELETE FROM test WHERE id = 527;\nDELETE FROM test WHERE id = 528;\nDELETE FROM test WHERE id = 529;\nDELETE FROM test WHERE id = 530;\nDELETE FROM test WHERE id = 531;\nDELETE FROM test WHERE id = 532;\nDELETE FROM test WHERE id = 533;\nDELETE FROM test WHERE id = 534;\nDELETE FROM test WHERE id = 535;\nDELETE FROM test WHERE id = 536;\nDELETE FROM test WHERE id = 537;\nDELETE FROM test WHERE id = 538;\nDELETE FROM test WHERE id = 539;\nDELETE FROM test WHERE id = 540;\nDELETE FROM test WHERE id = 541;\nDELETE FROM test WHERE id = 542;\nDELETE FROM test WHERE id = 543;\nDELETE FROM 
test WHERE id = 544;\nDELETE FROM test WHERE id = 545;\nDELETE FROM test WHERE id = 546;\nDELETE FROM test WHERE id = 547;\nDELETE FROM test WHERE id = 548;\nDELETE FROM test WHERE id = 549;\nDELETE FROM test WHERE id = 550;\nDELETE FROM test WHERE id = 551;\nDELETE FROM test WHERE id = 552;\nDELETE FROM test WHERE id = 553;\nDELETE FROM test WHERE id = 554;\nDELETE FROM test WHERE id = 555;\nDELETE FROM test WHERE id = 556;\nDELETE FROM test WHERE id = 557;\nDELETE FROM test WHERE id = 558;\nDELETE FROM test WHERE id = 559;\nDELETE FROM test WHERE id = 560;\nDELETE FROM test WHERE id = 561;\nDELETE FROM test WHERE id = 562;\nDELETE FROM test WHERE id = 563;\nDELETE FROM test WHERE id = 564;\nDELETE FROM test WHERE id = 565;\nDELETE FROM test WHERE id = 566;\nDELETE FROM test WHERE id = 567;\nDELETE FROM test WHERE id = 568;\nDELETE FROM test WHERE id = 569;\nDELETE FROM test WHERE id = 570;\nDELETE FROM test WHERE id = 571;\nDELETE FROM test WHERE id = 572;\nDELETE FROM test WHERE id = 573;\nDELETE FROM test WHERE id = 574;\nDELETE FROM test WHERE id = 575;\nDELETE FROM test WHERE id = 576;\nDELETE FROM test WHERE id = 577;\nDELETE FROM test WHERE id = 578;\nDELETE FROM test WHERE id = 579;\nDELETE FROM test WHERE id = 580;\nDELETE FROM test WHERE id = 581;\nDELETE FROM test WHERE id = 582;\nDELETE FROM test WHERE id = 583;\nDELETE FROM test WHERE id = 584;\nDELETE FROM test WHERE id = 585;\nDELETE FROM test WHERE id = 586;\nDELETE FROM test WHERE id = 587;\nDELETE FROM test WHERE id = 588;\nDELETE FROM test WHERE id = 589;\nDELETE FROM test WHERE id = 590;\nDELETE FROM test WHERE id = 591;\nDELETE FROM test WHERE id = 592;\nDELETE FROM test WHERE id = 593;\nDELETE FROM test WHERE id = 594;\nDELETE FROM test WHERE id = 595;\nDELETE FROM test WHERE id = 596;\nDELETE FROM test WHERE id = 597;\nDELETE FROM test WHERE id = 598;\nDELETE FROM test WHERE id = 599;\nDELETE FROM test WHERE id = 600;\nDELETE FROM test WHERE id = 601;\nDELETE FROM test WHERE id = 602;\nDELETE FROM test WHERE id = 603;\nDELETE FROM test WHERE id = 604;\nDELETE FROM test WHERE id = 605;\nDELETE FROM test WHERE id = 606;\nDELETE FROM test WHERE id = 607;\nDELETE FROM test WHERE id = 608;\nDELETE FROM test WHERE id = 609;\nDELETE FROM test WHERE id = 610;\nDELETE FROM test WHERE id = 611;\nDELETE FROM test WHERE id = 612;\nDELETE FROM test WHERE id = 613;\nDELETE FROM test WHERE id = 614;\nDELETE FROM test WHERE id = 615;\nDELETE FROM test WHERE id = 616;\nDELETE FROM test WHERE id = 617;\nDELETE FROM test WHERE id = 618;\nDELETE FROM test WHERE id = 619;\nDELETE FROM test WHERE id = 620;\nDELETE FROM test WHERE id = 621;\nDELETE FROM test WHERE id = 622;\nDELETE FROM test WHERE id = 623;\nDELETE FROM test WHERE id = 624;\nDELETE FROM test WHERE id = 625;\nDELETE FROM test WHERE id = 626;\nDELETE FROM test WHERE id = 627;\nDELETE FROM test WHERE id = 628;\nDELETE FROM test WHERE id = 629;\nDELETE FROM test WHERE id = 630;\nDELETE FROM test WHERE id = 631;\nDELETE FROM test WHERE id = 632;\nDELETE FROM test WHERE id = 633;\nDELETE FROM test WHERE id = 634;\nDELETE FROM test WHERE id = 635;\nDELETE FROM test WHERE id = 636;\nDELETE FROM test WHERE id = 637;\nDELETE FROM test WHERE id = 638;\nDELETE FROM test WHERE id = 639;\nDELETE FROM test WHERE id = 640;\nDELETE FROM test WHERE id = 641;\nDELETE FROM test WHERE id = 642;\nDELETE FROM test WHERE id = 643;\nDELETE FROM test WHERE id = 644;\nDELETE FROM test WHERE id = 645;\nDELETE FROM test WHERE id = 646;\nDELETE FROM test WHERE id = 647;\nDELETE FROM test WHERE id = 
648;\nDELETE FROM test WHERE id = 649;\nDELETE FROM test WHERE id = 650;\nDELETE FROM test WHERE id = 651;\nDELETE FROM test WHERE id = 652;\nDELETE FROM test WHERE id = 653;\nDELETE FROM test WHERE id = 654;\nDELETE FROM test WHERE id = 655;\nDELETE FROM test WHERE id = 656;\nDELETE FROM test WHERE id = 657;\nDELETE FROM test WHERE id = 658;\nDELETE FROM test WHERE id = 659;\nDELETE FROM test WHERE id = 660;\nDELETE FROM test WHERE id = 661;\nDELETE FROM test WHERE id = 662;\nDELETE FROM test WHERE id = 663;\nDELETE FROM test WHERE id = 664;\nDELETE FROM test WHERE id = 665;\nDELETE FROM test WHERE id = 666;\nDELETE FROM test WHERE id = 667;\nDELETE FROM test WHERE id = 668;\nDELETE FROM test WHERE id = 669;\nDELETE FROM test WHERE id = 670;\nDELETE FROM test WHERE id = 671;\nDELETE FROM test WHERE id = 672;\nDELETE FROM test WHERE id = 673;\nDELETE FROM test WHERE id = 674;\nDELETE FROM test WHERE id = 675;\nDELETE FROM test WHERE id = 676;\nDELETE FROM test WHERE id = 677;\nDELETE FROM test WHERE id = 678;\nDELETE FROM test WHERE id = 679;\nDELETE FROM test WHERE id = 680;\nDELETE FROM test WHERE id = 681;\nDELETE FROM test WHERE id = 682;\nDELETE FROM test WHERE id = 683;\nDELETE FROM test WHERE id = 684;\nDELETE FROM test WHERE id = 685;\nDELETE FROM test WHERE id = 686;\nDELETE FROM test WHERE id = 687;\nDELETE FROM test WHERE id = 688;\nDELETE FROM test WHERE id = 689;\nDELETE FROM test WHERE id = 690;\nDELETE FROM test WHERE id = 691;\nDELETE FROM test WHERE id = 692;\nDELETE FROM test WHERE id = 693;\nDELETE FROM test WHERE id = 694;\nDELETE FROM test WHERE id = 695;\nDELETE FROM test WHERE id = 696;\nDELETE FROM test WHERE id = 697;\nDELETE FROM test WHERE id = 698;\nDELETE FROM test WHERE id = 699;\nDELETE FROM test WHERE id = 700;\nDELETE FROM test WHERE id = 701;\nDELETE FROM test WHERE id = 702;\nDELETE FROM test WHERE id = 703;\nDELETE FROM test WHERE id = 704;\nDELETE FROM test WHERE id = 705;\nDELETE FROM test WHERE id = 706;\nDELETE FROM test WHERE id = 707;\nDELETE FROM test WHERE id = 708;\nDELETE FROM test WHERE id = 709;\nDELETE FROM test WHERE id = 710;\nDELETE FROM test WHERE id = 711;\nDELETE FROM test WHERE id = 712;\nDELETE FROM test WHERE id = 713;\nDELETE FROM test WHERE id = 714;\nDELETE FROM test WHERE id = 715;\nDELETE FROM test WHERE id = 716;\nDELETE FROM test WHERE id = 717;\nDELETE FROM test WHERE id = 718;\nDELETE FROM test WHERE id = 719;\nDELETE FROM test WHERE id = 720;\nDELETE FROM test WHERE id = 721;\nDELETE FROM test WHERE id = 722;\nDELETE FROM test WHERE id = 723;\nDELETE FROM test WHERE id = 724;\nDELETE FROM test WHERE id = 725;\nDELETE FROM test WHERE id = 726;\nDELETE FROM test WHERE id = 727;\nDELETE FROM test WHERE id = 728;\nDELETE FROM test WHERE id = 729;\nDELETE FROM test WHERE id = 730;\nDELETE FROM test WHERE id = 731;\nDELETE FROM test WHERE id = 732;\nDELETE FROM test WHERE id = 733;\nDELETE FROM test WHERE id = 734;\nDELETE FROM test WHERE id = 735;\nDELETE FROM test WHERE id = 736;\nDELETE FROM test WHERE id = 737;\nDELETE FROM test WHERE id = 738;\nDELETE FROM test WHERE id = 739;\nDELETE FROM test WHERE id = 740;\nDELETE FROM test WHERE id = 741;\nDELETE FROM test WHERE id = 742;\nDELETE FROM test WHERE id = 743;\nDELETE FROM test WHERE id = 744;\nDELETE FROM test WHERE id = 745;\nDELETE FROM test WHERE id = 746;\nDELETE FROM test WHERE id = 747;\nDELETE FROM test WHERE id = 748;\nDELETE FROM test WHERE id = 749;\nDELETE FROM test WHERE id = 750;\nDELETE FROM test WHERE id = 751;\nDELETE FROM test WHERE id = 752;\nDELETE FROM 
test WHERE id = 753;\nDELETE FROM test WHERE id = 754;\nDELETE FROM test WHERE id = 755;\nDELETE FROM test WHERE id = 756;\nDELETE FROM test WHERE id = 757;\nDELETE FROM test WHERE id = 758;\nDELETE FROM test WHERE id = 759;\nDELETE FROM test WHERE id = 760;\nDELETE FROM test WHERE id = 761;\nDELETE FROM test WHERE id = 762;\nDELETE FROM test WHERE id = 763;\nDELETE FROM test WHERE id = 764;\nDELETE FROM test WHERE id = 765;\nDELETE FROM test WHERE id = 766;\nDELETE FROM test WHERE id = 767;\nDELETE FROM test WHERE id = 768;\nDELETE FROM test WHERE id = 769;\nDELETE FROM test WHERE id = 770;\nDELETE FROM test WHERE id = 771;\nDELETE FROM test WHERE id = 772;\nDELETE FROM test WHERE id = 773;\nDELETE FROM test WHERE id = 774;\nDELETE FROM test WHERE id = 775;\nDELETE FROM test WHERE id = 776;\nDELETE FROM test WHERE id = 777;\nDELETE FROM test WHERE id = 778;\nDELETE FROM test WHERE id = 779;\nDELETE FROM test WHERE id = 780;\nDELETE FROM test WHERE id = 781;\nDELETE FROM test WHERE id = 782;\nDELETE FROM test WHERE id = 783;\nDELETE FROM test WHERE id = 784;\nDELETE FROM test WHERE id = 785;\nDELETE FROM test WHERE id = 786;\nDELETE FROM test WHERE id = 787;\nDELETE FROM test WHERE id = 788;\nDELETE FROM test WHERE id = 789;\nDELETE FROM test WHERE id = 790;\nDELETE FROM test WHERE id = 791;\nDELETE FROM test WHERE id = 792;\nDELETE FROM test WHERE id = 793;\nDELETE FROM test WHERE id = 794;\nDELETE FROM test WHERE id = 795;\nDELETE FROM test WHERE id = 796;\nDELETE FROM test WHERE id = 797;\nDELETE FROM test WHERE id = 798;\nDELETE FROM test WHERE id = 799;\nDELETE FROM test WHERE id = 800;\nDELETE FROM test WHERE id = 801;\nDELETE FROM test WHERE id = 802;\nDELETE FROM test WHERE id = 803;\nDELETE FROM test WHERE id = 804;\nDELETE FROM test WHERE id = 805;\nDELETE FROM test WHERE id = 806;\nDELETE FROM test WHERE id = 807;\nDELETE FROM test WHERE id = 808;\nDELETE FROM test WHERE id = 809;\nDELETE FROM test WHERE id = 810;\nDELETE FROM test WHERE id = 811;\nDELETE FROM test WHERE id = 812;\nDELETE FROM test WHERE id = 813;\nDELETE FROM test WHERE id = 814;\nDELETE FROM test WHERE id = 815;\nDELETE FROM test WHERE id = 816;\nDELETE FROM test WHERE id = 817;\nDELETE FROM test WHERE id = 818;\nDELETE FROM test WHERE id = 819;\nDELETE FROM test WHERE id = 820;\nDELETE FROM test WHERE id = 821;\nDELETE FROM test WHERE id = 822;\nDELETE FROM test WHERE id = 823;\nDELETE FROM test WHERE id = 824;\nDELETE FROM test WHERE id = 825;\nDELETE FROM test WHERE id = 826;\nDELETE FROM test WHERE id = 827;\nDELETE FROM test WHERE id = 828;\nDELETE FROM test WHERE id = 829;\nDELETE FROM test WHERE id = 830;\nDELETE FROM test WHERE id = 831;\nDELETE FROM test WHERE id = 832;\nDELETE FROM test WHERE id = 833;\nDELETE FROM test WHERE id = 834;\nDELETE FROM test WHERE id = 835;\nDELETE FROM test WHERE id = 836;\nDELETE FROM test WHERE id = 837;\nDELETE FROM test WHERE id = 838;\nDELETE FROM test WHERE id = 839;\nDELETE FROM test WHERE id = 840;\nDELETE FROM test WHERE id = 841;\nDELETE FROM test WHERE id = 842;\nDELETE FROM test WHERE id = 843;\nDELETE FROM test WHERE id = 844;\nDELETE FROM test WHERE id = 845;\nDELETE FROM test WHERE id = 846;\nDELETE FROM test WHERE id = 847;\nDELETE FROM test WHERE id = 848;\nDELETE FROM test WHERE id = 849;\nDELETE FROM test WHERE id = 850;\nDELETE FROM test WHERE id = 851;\nDELETE FROM test WHERE id = 852;\nDELETE FROM test WHERE id = 853;\nDELETE FROM test WHERE id = 854;\nDELETE FROM test WHERE id = 855;\nDELETE FROM test WHERE id = 856;\nDELETE FROM test WHERE id = 
857;\nDELETE FROM test WHERE id = 858;\nDELETE FROM test WHERE id = 859;\nDELETE FROM test WHERE id = 860;\nDELETE FROM test WHERE id = 861;\nDELETE FROM test WHERE id = 862;\nDELETE FROM test WHERE id = 863;\nDELETE FROM test WHERE id = 864;\nDELETE FROM test WHERE id = 865;\nDELETE FROM test WHERE id = 866;\nDELETE FROM test WHERE id = 867;\nDELETE FROM test WHERE id = 868;\nDELETE FROM test WHERE id = 869;\nDELETE FROM test WHERE id = 870;\nDELETE FROM test WHERE id = 871;\nDELETE FROM test WHERE id = 872;\nDELETE FROM test WHERE id = 873;\nDELETE FROM test WHERE id = 874;\nDELETE FROM test WHERE id = 875;\nDELETE FROM test WHERE id = 876;\nDELETE FROM test WHERE id = 877;\nDELETE FROM test WHERE id = 878;\nDELETE FROM test WHERE id = 879;\nDELETE FROM test WHERE id = 880;\nDELETE FROM test WHERE id = 881;\nDELETE FROM test WHERE id = 882;\nDELETE FROM test WHERE id = 883;\nDELETE FROM test WHERE id = 884;\nDELETE FROM test WHERE id = 885;\nDELETE FROM test WHERE id = 886;\nDELETE FROM test WHERE id = 887;\nDELETE FROM test WHERE id = 888;\nDELETE FROM test WHERE id = 889;\nDELETE FROM test WHERE id = 890;\nDELETE FROM test WHERE id = 891;\nDELETE FROM test WHERE id = 892;\nDELETE FROM test WHERE id = 893;\nDELETE FROM test WHERE id = 894;\nDELETE FROM test WHERE id = 895;\nDELETE FROM test WHERE id = 896;\nDELETE FROM test WHERE id = 897;\nDELETE FROM test WHERE id = 898;\nDELETE FROM test WHERE id = 899;\nDELETE FROM test WHERE id = 900;\nDELETE FROM test WHERE id = 901;\nDELETE FROM test WHERE id = 902;\nDELETE FROM test WHERE id = 903;\nDELETE FROM test WHERE id = 904;\nDELETE FROM test WHERE id = 905;\nDELETE FROM test WHERE id = 906;\nDELETE FROM test WHERE id = 907;\nDELETE FROM test WHERE id = 908;\nDELETE FROM test WHERE id = 909;\nDELETE FROM test WHERE id = 910;\nDELETE FROM test WHERE id = 911;\nDELETE FROM test WHERE id = 912;\nDELETE FROM test WHERE id = 913;\nDELETE FROM test WHERE id = 914;\nDELETE FROM test WHERE id = 915;\nDELETE FROM test WHERE id = 916;\nDELETE FROM test WHERE id = 917;\nDELETE FROM test WHERE id = 918;\nDELETE FROM test WHERE id = 919;\nDELETE FROM test WHERE id = 920;\nDELETE FROM test WHERE id = 921;\nDELETE FROM test WHERE id = 922;\nDELETE FROM test WHERE id = 923;\nDELETE FROM test WHERE id = 924;\nDELETE FROM test WHERE id = 925;\nDELETE FROM test WHERE id = 926;\nDELETE FROM test WHERE id = 927;\nDELETE FROM test WHERE id = 928;\nDELETE FROM test WHERE id = 929;\nDELETE FROM test WHERE id = 930;\nDELETE FROM test WHERE id = 931;\nDELETE FROM test WHERE id = 932;\nDELETE FROM test WHERE id = 933;\nDELETE FROM test WHERE id = 934;\nDELETE FROM test WHERE id = 935;\nDELETE FROM test WHERE id = 936;\nDELETE FROM test WHERE id = 937;\nDELETE FROM test WHERE id = 938;\nDELETE FROM test WHERE id = 939;\nDELETE FROM test WHERE id = 940;\nDELETE FROM test WHERE id = 941;\nDELETE FROM test WHERE id = 942;\nDELETE FROM test WHERE id = 943;\nDELETE FROM test WHERE id = 944;\nDELETE FROM test WHERE id = 945;\nDELETE FROM test WHERE id = 946;\nDELETE FROM test WHERE id = 947;\nDELETE FROM test WHERE id = 948;\nDELETE FROM test WHERE id = 949;\nDELETE FROM test WHERE id = 950;\nDELETE FROM test WHERE id = 951;\nDELETE FROM test WHERE id = 952;\nDELETE FROM test WHERE id = 953;\nDELETE FROM test WHERE id = 954;\nDELETE FROM test WHERE id = 955;\nDELETE FROM test WHERE id = 956;\nDELETE FROM test WHERE id = 957;\nDELETE FROM test WHERE id = 958;\nDELETE FROM test WHERE id = 959;\nDELETE FROM test WHERE id = 960;\nDELETE FROM test WHERE id = 961;\nDELETE FROM 
test WHERE id = 962;\nDELETE FROM test WHERE id = 963;\nDELETE FROM test WHERE id = 964;\nDELETE FROM test WHERE id = 965;\nDELETE FROM test WHERE id = 966;\nDELETE FROM test WHERE id = 967;\nDELETE FROM test WHERE id = 968;\nDELETE FROM test WHERE id = 969;\nDELETE FROM test WHERE id = 970;\nDELETE FROM test WHERE id = 971;\nDELETE FROM test WHERE id = 972;\nDELETE FROM test WHERE id = 973;\nDELETE FROM test WHERE id = 974;\nDELETE FROM test WHERE id = 975;\nDELETE FROM test WHERE id = 976;\nDELETE FROM test WHERE id = 977;\nDELETE FROM test WHERE id = 978;\nDELETE FROM test WHERE id = 979;\nDELETE FROM test WHERE id = 980;\nDELETE FROM test WHERE id = 981;\nDELETE FROM test WHERE id = 982;\nDELETE FROM test WHERE id = 983;\nDELETE FROM test WHERE id = 984;\nDELETE FROM test WHERE id = 985;\nDELETE FROM test WHERE id = 986;\nDELETE FROM test WHERE id = 987;\nDELETE FROM test WHERE id = 988;\nDELETE FROM test WHERE id = 989;\nDELETE FROM test WHERE id = 990;\nDELETE FROM test WHERE id = 991;\nDELETE FROM test WHERE id = 992;\nDELETE FROM test WHERE id = 993;\nDELETE FROM test WHERE id = 994;\nDELETE FROM test WHERE id = 995;\nDELETE FROM test WHERE id = 996;\nDELETE FROM test WHERE id = 997;\nDELETE FROM test WHERE id = 998;\nDELETE FROM test WHERE id = 999;\nSELECT * FROM test;\nDROP TABLE test;\n" }, { "alpha_fraction": 0.5647321343421936, "alphanum_fraction": 0.5848214030265808, "avg_line_length": 30.14285659790039, "blob_id": "c2c7d529f59aab197e70a4a5ca6cc95d3b9b06e8", "content_id": "39b5cec8dbce9a699de40e7324ec7fe732540b1b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 448, "license_type": "no_license", "max_line_length": 74, "num_lines": 14, "path": "/container builder/alpine-attrs/OperationMaker.py", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "num = 200\r\nfilename = \"operation-command-{0}\".format(num)\r\nwith open(filename, 'w') as file_object:\r\n for i in range(num):\r\n file_object.write(\"touch test{0}\\n\".format(i))\r\n\r\n for i in range(num):\r\n file_object.write(\"echo \\\"Hello world{0}\\\" > test{0}\\n\".format(i))\r\n\r\n for i in range(num):\r\n file_object.write(\"cat test{0}\\n\".format(i))\r\n\r\n for i in range(num):\r\n file_object.write(\"rm test{0}\\n\".format(i))" }, { "alpha_fraction": 0.5161290168762207, "alphanum_fraction": 0.6290322542190552, "avg_line_length": 11.199999809265137, "blob_id": "3a4abe77683a2504668a0ba6b115919907ffa27f", "content_id": "c41332fc143cbe2843f2e989dca7c4a13c5165fe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 62, "license_type": "no_license", "max_line_length": 20, "num_lines": 5, "path": "/container builder/nginx-attrs/test", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "#!/bin/bash \nfor i in {1..1000} \ndo \ncurl localhost:80\ndone \n" }, { "alpha_fraction": 0.5882353186607361, "alphanum_fraction": 0.720588207244873, "avg_line_length": 21.66666603088379, "blob_id": "a2e4489d2788b730dc5ab04997af63cba39bee67", "content_id": "d699036e0ae0c50af457c80ddddfeb7711ce3a56", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 68, "license_type": "no_license", "max_line_length": 35, "num_lines": 3, "path": "/container builder/node/visit.py", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "import os\nfor i in range(2000):\n os.system(\"curl localhost:49160\")\n" }, { 
"alpha_fraction": 0.5233333110809326, "alphanum_fraction": 0.5583333373069763, "avg_line_length": 33.411766052246094, "blob_id": "c621de1a314cd88119841b7248800a8e71e2cdf6", "content_id": "5299bd27d65d96775de10b4ee3bec938719a3196", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "YAML", "length_bytes": 692, "license_type": "no_license", "max_line_length": 92, "num_lines": 17, "path": "/container builder/mysql/docker-compose.yml", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "version: '3'\r\nservices:\r\n db:\r\n #构建mysql镜像\r\n image: mysql:8.0.21\r\n container_name: mysql-db\r\n command: mysqld --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci\r\n environment:\r\n MYSQL_ROOT_PASSWORD: root #root管理员用户密码\r\n ports:\r\n - '6606:3306' #host物理直接映射端口为6606\r\n volumes:\r\n #mysql数据库挂载到host物理机目录/e/docker/mysql/data/db\r\n - \"./db:/var/lib/mysql\" \r\n #容器的配置目录挂载到host物理机目录/e/docker/mysql/data/conf \r\n # - \"./conf:/etc/mysql/conf.d\"\r\n - \"./test.sql:/test.sql\"" }, { "alpha_fraction": 0.7834645509719849, "alphanum_fraction": 0.8031495809555054, "avg_line_length": 55.44444274902344, "blob_id": "0424a595187fd88724ca257c2f702eb65597149f", "content_id": "0f703bf662964fa60a91e7f568dcddad4b8a7cf3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 508, "license_type": "no_license", "max_line_length": 111, "num_lines": 9, "path": "/README.md", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "# json-file-dataset\n\nThe commands used to run and operate containers are stored in the directory \"container builder\".\n\nThe json-file logs in IMGxfs are stored in the directory \"logs in IMGxfs\" and the image can be download from:\nhttps://drive.google.com/file/d/1moF0JMcqISq2-1MyNbcPtU8CVBFZv-LL/view?usp=sharing\n\nThe json-file logs in IMGext4 are stored in the directory \"logs in IMGext4\" and the image can be download from:\nhttps://drive.google.com/file/d/1VievUnIzQyCOuJqPabIfUmU3JiaXW7SL/view?usp=sharing\n" }, { "alpha_fraction": 0.5960384011268616, "alphanum_fraction": 0.639255702495575, "avg_line_length": 44.02702713012695, "blob_id": "f7f66419fa25174fe393ae89f75c7591c77c4eda", "content_id": "c9be09f89f989bb50cfb95d228e72fe61bacaf9e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 1666, "license_type": "no_license", "max_line_length": 61, "num_lines": 37, "path": "/container builder/mongo/test.sql", "repo_name": "nate-ge/json-file-dataset", "src_encoding": "UTF-8", "text": "use testdb\ndb.createCollection(\"testColl\")\ndb.testColl.insert({title: '0',description: '0', likes: 0})\ndb.testColl.insert({title: '1',description: '2', likes: 3})\ndb.testColl.insert({title: '2',description: '4', likes: 6})\ndb.testColl.insert({title: '3',description: '6', likes: 9})\ndb.testColl.insert({title: '4',description: '8', likes: 12})\ndb.testColl.insert({title: '5',description: '10', likes: 15})\ndb.testColl.insert({title: '6',description: '12', likes: 18})\ndb.testColl.insert({title: '7',description: '14', likes: 21})\ndb.testColl.insert({title: '8',description: '16', likes: 24})\ndb.testColl.insert({title: '9',description: '18', likes: 
27})\ndb.testColl.find().pretty()\ndb.testColl.update({'title':'0'},{$set:{'title':'1'}})\ndb.testColl.update({'title':'1'},{$set:{'title':'2'}})\ndb.testColl.update({'title':'2'},{$set:{'title':'3'}})\ndb.testColl.update({'title':'3'},{$set:{'title':'4'}})\ndb.testColl.update({'title':'4'},{$set:{'title':'5'}})\ndb.testColl.update({'title':'5'},{$set:{'title':'6'}})\ndb.testColl.update({'title':'6'},{$set:{'title':'7'}})\ndb.testColl.update({'title':'7'},{$set:{'title':'8'}})\ndb.testColl.update({'title':'8'},{$set:{'title':'9'}})\ndb.testColl.update({'title':'9'},{$set:{'title':'10'}})\ndb.testColl.find().pretty()\ndb.testColl.remove({'title':'0'})\ndb.testColl.remove({'title':'1'})\ndb.testColl.remove({'title':'2'})\ndb.testColl.remove({'title':'3'})\ndb.testColl.remove({'title':'4'})\ndb.testColl.remove({'title':'5'})\ndb.testColl.remove({'title':'6'})\ndb.testColl.remove({'title':'7'})\ndb.testColl.remove({'title':'8'})\ndb.testColl.remove({'title':'9'})\ndb.testColl.find().pretty()\ndb.testColl.drop()\ndb.dropDatabase()\n" } ]
18
benthomasson/ansible-worker-websocket
https://github.com/benthomasson/ansible-worker-websocket
9651ce5271ce212b8bb38fec94e675ac9a43a2f9
a27704f86ba14daa598d421192abea492e6a6e95
20e805c26f899fece4481dc4e2059c0322e62278
refs/heads/master
2020-04-05T04:40:38.961675
2018-11-12T14:43:42
2018-11-12T14:43:42
156,559,997
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5411078929901123, "alphanum_fraction": 0.5434402227401733, "avg_line_length": 29.087718963623047, "blob_id": "1a0bcebfbed22c231ffcfc3c3e38f61433f19a17", "content_id": "38a7c177e1b8104b6391f1be0068bf4c9e5020c2", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1715, "license_type": "permissive", "max_line_length": 72, "num_lines": 57, "path": "/ansible_worker_websocket/client.py", "repo_name": "benthomasson/ansible-worker-websocket", "src_encoding": "UTF-8", "text": "import websocket\nimport json\nimport gevent\nimport traceback\nfrom pprint import pprint\nfrom .messages import serialize, Deploy, Cancel\n\n\nclass WebsocketChannel(object):\n\n def __init__(self, address, outbox):\n self.address = address\n self.start_socket_thread()\n self.outbox = outbox\n\n def start_socket_thread(self):\n print(self.address)\n self.socket = websocket.WebSocketApp(self.address,\n on_message=self.on_message,\n on_error=self.on_error,\n on_close=self.on_close,\n on_open=self.on_open)\n self.thread = gevent.spawn(self.socket.run_forever)\n\n def put(self, message):\n try:\n self.socket.send(json.dumps(serialize(message)))\n except Exception as e:\n print(e)\n print(traceback.format_exc())\n self.thread.kill()\n self.start_socket_thread()\n\n def on_open(self):\n print('WebsocketChannel on_open')\n pass\n\n def on_message(self, message):\n print('WebsocketChannel on_message')\n message = json.loads(message)\n pprint(message)\n if message[0] == \"deploy\":\n self.outbox.put(Deploy(message[1]))\n elif message[0] == \"cancel\":\n self.outbox.put(Cancel())\n\n def on_close(self):\n print('WebsocketChannel on_close')\n self.thread.kill()\n\n def on_error(self, error):\n print('WebsocketChannel on_error', error)\n try:\n self.on_close()\n finally:\n gevent.sleep(1)\n self.start_socket_thread()\n" } ]
1
nancydyc/bookworm
https://github.com/nancydyc/bookworm
4799e0bc069b1dbdeeda781350f73678a3b87864
1b3441cdb7e34b262c152b4d0431e26593af7a42
81009dfcd37d3f9bbb4eff20abfbc2719f112f9f
refs/heads/main
2023-03-14T07:42:50.719008
2021-03-08T04:55:57
2021-03-08T04:55:57
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7085201740264893, "alphanum_fraction": 0.7085201740264893, "avg_line_length": 13.928571701049805, "blob_id": "7ed9cc3956e79f0da9fb635a69718115aeea450a", "content_id": "2ff85786e47345215c6721eb3719cd368a3a5eda", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 223, "license_type": "no_license", "max_line_length": 31, "num_lines": 14, "path": "/seed_database.py", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"\"\"Script to seed database\"\"\"\r\n\r\nimport os\r\nimport json\r\n\r\nimport crud\r\nimport model\r\nimport server\r\n\r\nos.system(\"dropdb bookworm\")\r\nos.system(\"createdb bookworm\")\r\n\r\nmodel.connect_to_db(server.app)\r\nmodel.db.create_all()\r\n" }, { "alpha_fraction": 0.5019999742507935, "alphanum_fraction": 0.5027999877929688, "avg_line_length": 38.046875, "blob_id": "84ca0ade264049a9921abb74389acbdf746e97a5", "content_id": "776fa6da425f2d2259aaf56ad699e92993a79951", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2500, "license_type": "no_license", "max_line_length": 161, "num_lines": 64, "path": "/static/components/LogIn.jsx", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"use strict;\"\n\nconst useHistory = ReactRouterDOM.useHistory;\n\n\nfunction LogIn(props) {\n const[userEmail, setUserEmail] = React.useState('');\n const[userPassword, setUserPassword] = React.useState('');\n \n let history = useHistory();\n \n function logUserIn(evt) {\n evt.preventDefault();\n \n // const userDetails = {\"email\": document.getElementById(\"login-email\").value,\n // \"password\": document.getElementById(\"login-password\").value};\n\n const userDetails = {\"email\": userEmail,\n \"password\": userPassword};\n \n fetch(\"/login\", {\n method: \"POST\",\n credentials: \"include\",\n body: JSON.stringify(userDetails),\n headers: {\n // 'Accept': 'application/json',\n 'Content-Type': 'application/json'\n },\n // mode: \"cors\"\n })\n .then (response => response.json())\n .then(data => {\n if (\"error\" in data) {\n alert(data[\"error\"]);\n history.push(\"/login\");\n }\n else {\n localStorage.setItem(\"userId\", data[\"user_id\"])\n localStorage.setItem(\"userFirstName\", data[\"user_first_name\"])\n props.setUserLoggedIn({userId: data[\"user_id\"], userFirstName: data[\"user_first_name\"]});\n history.push(\"/user/home/browsing\")\n // redirect using useHistory to a User Detail page -> nav bar (w/ logout and search on top), horizontal row, category and books within for each\n }\n });\n };\n \n\n return (\n <div>\n <h1>Log In</h1>\n <form action=\"/login\" onSubmit={logUserIn}>\n <input type=\"text\" id=\"login-email\" name=\"email\" placeholder=\"Your Email\" onChange={(e) => setUserEmail(e.target.value)} autoFocus required />\n <input type=\"password\" id=\"login-password\" name=\"password\" placeholder=\"Your Password\" onChange={(e) => setUserPassword(e.target.value)} required />\n <input type=\"submit\" value=\"Submit\" />\n </form>\n <p>\n Don't have an account yet? 
<Link to=\"/create-account\">Create one here!</Link>\n </p>\n </div>\n );\n \n };\n\n// ReactDOM.render(<LogIn />, document.getElementById(\"root\"));\n\n" }, { "alpha_fraction": 0.5318230986595154, "alphanum_fraction": 0.538295567035675, "avg_line_length": 30.034482955932617, "blob_id": "9f353c4968aee26cd29f9efb088431416dffef6d", "content_id": "fdc18b35820e4b574304757e07bb2e93ac2c0ef7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 927, "license_type": "no_license", "max_line_length": 131, "num_lines": 29, "path": "/static/components/SearchResults.jsx", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"use strict\";\r\n\r\nfunction SearchResults(props) {\r\n\r\n const content = []\r\n const [books, setBooks] = React.useState([]);\r\n let bookKey = ''\r\n\r\n React.useEffect(() => {\r\n fetch(`https://www.googleapis.com/books/v1/volumes?q=${props.bookQuery}&maxResults=10`)\r\n .then (response => response.json())\r\n .then (result => setBooks(result.items))\r\n }, [props.bookQuery]) \r\n\r\n // if (books.length === 0) return <div>Loading...</div>\r\n \r\n let i = 0\r\n for (const book of books) {\r\n \r\n bookKey = book.id\r\n // Sometimes the id is the same for two books being returned by Google Books.... \r\n\r\n content.push(<BookTile key={i} book={book} userLoggedIn={props.userLoggedIn} userCategories={props.userCategories} />);\r\n i += 1 \r\n }\r\n \r\n return <div>{content ? content : \"Nothing found!\"}</div>\r\n\r\n}" }, { "alpha_fraction": 0.5684394240379333, "alphanum_fraction": 0.569311261177063, "avg_line_length": 34.78125, "blob_id": "fb717741256fa18b4d1b3492c1959ec634e80de1", "content_id": "87226b9bc4b9a3bc7408a8df7a85c0d37f118484", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1147, "license_type": "no_license", "max_line_length": 129, "num_lines": 32, "path": "/static/components/SearchBar.jsx", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"use strict\";\n\nfunction SearchBar(props) {\n \n let history = useHistory();\n\n function bookSearch(evt) {\n evt.preventDefault();\n props.setBookQuery(document.getElementById(\"book-search\").value)\n\n fetch(\"/categories\")\n .then (response => response.json())\n .then(result => {console.log(result[\"categories\"]); return result;})\n .then((data) => props.setUserCategories(data[\"categories\"]))\n .catch(console.error)\n\n history.push(\"/book-search\")\n document.getElementById(\"search-bar\").reset();\n }\n \n return (\n // <form id=\"search-bar\" onSubmit={bookSearch}>\n <Form id=\"search-bar\" onSubmit={bookSearch} inline>\n <FormControl type=\"text\" id=\"book-search\" placeholder=\"Search for book by title or author\" className=\"ml-sm-2\" />\n {/* <input type=\"text\" id=\"book-search\" placeholder=\"Search for book by title or author\" ></input> */}\n <Button type=\"submit\" variant=\"outline-dark\">Search</Button>\n {/* <input type=\"submit\" ></input> */}\n </Form>\n // </form>\n );\n \n}\n\n\n" }, { "alpha_fraction": 0.4059270918369293, "alphanum_fraction": 0.40846168994903564, "avg_line_length": 36.007408142089844, "blob_id": "74f24ecc65b1e2fffe2d470ed81d44d381c6a751", "content_id": "6c90b9259e6febf8ea4f08f3a31e2d6f2055e8ea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 5129, "license_type": "no_license", "max_line_length": 145, "num_lines": 135, "path": 
"/static/components/EventDetails.jsx", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"use strict\";\r\n\r\nfunction EventDetails(props) {\r\n\r\n const {event} = props\r\n const [booksVotedFor, setBooksVotedFor] = React.useState([])\r\n const [eventsBooksVotes, setEventsBooksVotes] = React.useState([])\r\n \r\n let history = useHistory();\r\n\r\n React.useEffect(() => {\r\n fetch(\"/vote\", {\r\n method: \"GET\"\r\n })\r\n .then((response) => response.json())\r\n .then((data) => {\r\n console.log(data)\r\n setBooksVotedFor(data)})\r\n }, [props.changeInEvent])\r\n \r\n const updateEventBooks = (eventId, type) => (evt) => {\r\n evt.preventDefault();\r\n\r\n fetch(\"/event-books\", {\r\n method: \"POST\",\r\n credentials: \"include\",\r\n body: JSON.stringify({\"event_id\": eventId,\r\n \"update_type\": type}),\r\n headers: {\r\n 'Content-Type': 'application/json'\r\n },\r\n })\r\n .then(response => response.json())\r\n .then(data => props.setChangeInEvent(data[\"success\"]))\r\n .then(() => props.setChangeInEvent(null))\r\n }\r\n\r\n const updateVote = (bookIsbn) => (evt) => {\r\n evt.preventDefault();\r\n console.log(\"this ran\")\r\n \r\n fetch(\"/vote\", {\r\n method: \"POST\",\r\n credentials: \"include\",\r\n body: JSON.stringify({\"eventId\": event.id,\r\n \"bookIsbn\": bookIsbn}),\r\n headers: {\r\n \"Content-Type\": \"application/json\"\r\n },\r\n })\r\n .then(response => response.json())\r\n .then(result => {\r\n console.log(\"This is the result\",result)\r\n setBooksVotedFor(result[\"booksVotedFor\"]); \r\n return result;\r\n })\r\n .then(data => {\r\n if (\"error\" in data) {\r\n alert(data[\"error\"])\r\n }\r\n else {\r\n alert(data[\"success\"])\r\n setEventsBooksVotes(data[\"allEventsBooks\"])\r\n }\r\n console.log(data)\r\n })\r\n }\r\n\r\n return (\r\n (<div>\r\n <h2>On {event.event_date.slice(0,16)}, you are {props.type === \"hosting\" ? \"hosting\" : \"attending\"} a book club!</h2>\r\n {props.type === \"hosting\" ? null : <h3>Hosted by: {event.host}</h3>}\r\n <h3>Time: {event.start_time} to {event.end_time}</h3>\r\n <h3>Location: {event.city}</h3>\r\n {event.books === [] \r\n ? null \r\n : <div className=\"book-tile\">\r\n <h3>Book Suggestions</h3>\r\n {event.books.map(book => \r\n // {eventsBooksVotes.map(eventBook => \r\n // {eventBook.isbn === book.isbn && eventBook.is_the_one ? \r\n (<div className=\"event-book\">\r\n <Book key={book.isbn} book={book} setBookForDetails={props.setBookForDetails} categoryLabel=\"event\" eventId={event.id} />\r\n {event.can_vote \r\n ? <div>\r\n {eventsBooksVotes.map(eventBook => \r\n <div>\r\n {eventBook.isbn === book.isbn ? eventBook.vote_count : null}\r\n </div>\r\n )}\r\n <button className=\"vote\" id={book.isbn} onClick={updateVote(book.isbn)}>\r\n {booksVotedFor[event.id] && booksVotedFor[event.id].includes(book.isbn) ? \"Unvote\" : \"Vote\"}\r\n </button> \r\n </div> \r\n : null}\r\n </div>)\r\n // : null}\r\n // )}\r\n )}\r\n </div>\r\n }\r\n {props.type === \"hosting\" ? \r\n <div>\r\n {event.can_add_books \r\n ? <div>\r\n <button onClick={updateEventBooks(event.id, \"suggest\")}>\r\n Stop Book Suggestions\r\n </button>\r\n <button onClick={() => history.push(`/user/${event.id}/${props.type}`)}>\r\n Suggest a Book\r\n </button>\r\n </div> \r\n : <button onClick={updateEventBooks(event.id, \"suggest\")}>\r\n Allow Book Suggestions\r\n </button>\r\n }\r\n {event.can_vote /*&& !event.can_add_books */\r\n ? 
<button onClick={updateEventBooks(event.id, \"vote\")}>\r\n Stop the Voting\r\n </button> \r\n : <button onClick={updateEventBooks(event.id, \"vote\")}>\r\n Start the Voting\r\n </button>\r\n }\r\n </div>\r\n : \r\n <div>\r\n {event.can_add_books \r\n ? <button onClick={() => history.push(`/user/${event.id}/${props.type}`)}>Suggest a Book</button> \r\n : null}\r\n </div>\r\n }\r\n </div>)\r\n )\r\n}" }, { "alpha_fraction": 0.5037729144096375, "alphanum_fraction": 0.5059288740158081, "avg_line_length": 38.36231994628906, "blob_id": "3ba521f36df3a966b592a8fac735ff2208454953", "content_id": "85c18605c03c6b243ffc0b484c9ee57aec82ae13", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2783, "license_type": "no_license", "max_line_length": 117, "num_lines": 69, "path": "/static/components/CategoryContainer.jsx", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"use strict\";\r\n\r\nfunction CategoryContainer(props) {\r\n \r\n // const [newLabel, setNewLabel] = React.useState(null)\r\n const labelChange = React.useRef(\"\");\r\n const booksInCategory = []\r\n let history = useHistory();\r\n console.log(\"This is new label\",props. newLabel)\r\n console.log(\"THIS is label up top\", props.label)\r\n\r\n const showForm = (arg) => (evt) => {\r\n evt.preventDefault();\r\n\r\n if(arg === 1) {\r\n document.getElementById(`change-label-${props.label}`).style.visibility=\"visible\";\r\n }\r\n else if (arg === 0) {\r\n document.getElementById(`change-label-${props.label}`).style.visibility=\"hidden\";\r\n document.getElementById(`change-label-${props.label}`).reset();\r\n }\r\n }\r\n\r\n const updateCategory = (evt) => {\r\n evt.preventDefault();\r\n console.log(\"This is the labelChange.curretn useRef:\", labelChange.current)\r\n fetch(\"/update-category\", {\r\n method: \"POST\",\r\n credentials: \"include\",\r\n body: JSON.stringify({\"old_label\": props.label,\r\n \"new_label\": labelChange.current}),\r\n headers: {\r\n 'Content-Type': 'application/json'\r\n },\r\n })\r\n .then(response => response.json())\r\n .then(data => {\r\n alert(data[\"success\"])\r\n props.setNewLabel(data.label)\r\n document.getElementById(`change-label-${props.label}`).style.visibility=\"hidden\";\r\n document.getElementById(`change-label-${props.label}`).reset();\r\n })\r\n }\r\n\r\n\r\n for (const book of props.books) {\r\n booksInCategory.push(<Book key={book.isbn} \r\n book={book} \r\n setBookForDetails={props.setBookForDetails} \r\n categoryLabel={props.label} \r\n eventId={props.eventId}\r\n type={props.type} />)\r\n }\r\n \r\n return ( \r\n <div>\r\n <h1>{props.label}</h1>\r\n <form id={`change-label-${props.label}`} onSubmit={updateCategory} style={{visibility: \"hidden\"}} >\r\n <input type=\"text\" placeholder={props.label} onChange={(e) => labelChange.current = (e.target.value)} />\r\n <input type=\"button\" value=\"Nevermind\" onClick={showForm(0)} />\r\n <input type=\"submit\" />\r\n </form>\r\n <button onClick={showForm(1)}>Change Category Name</button>\r\n {/* <div className=\"scroll-shelf\" >{booksInCategory}</div> */}\r\n <div>{booksInCategory}</div>\r\n <img src=\"/static/img/single-shelf.PNG\" alt=\"\"/>\r\n </div>\r\n )\r\n}" }, { "alpha_fraction": 0.45082953572273254, "alphanum_fraction": 0.45515748858451843, "avg_line_length": 37.11320877075195, "blob_id": "8bc878d812d2aa600e55d3ad6203a7e579abc237", "content_id": "79c4ef10a78145cb5a5dd5c8b708630d7ec26cd4", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "JavaScript", "length_bytes": 4159, "license_type": "no_license", "max_line_length": 159, "num_lines": 106, "path": "/static/components/BookTile.jsx", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"use strict\";\r\n\r\nfunction BookTile(props) {\r\n const { book } = props;\r\n let history = useHistory();\r\n const [categoryName, setCategoryName] = React.useState(props.userLoggedIn.userId ? props.userCategories[0].label : \"\");\r\n\r\n const addNewSelect = () => {\r\n \r\n for (let i = 1; i < 11; i += 1) {\r\n let newCategory = document.forms[i].newCategory;\r\n let chooseCategory = document.forms[i].chooseCategory;\r\n\r\n if (chooseCategory.options[chooseCategory.selectedIndex].value === \"add-new\") { \r\n newCategory.style.visibility = \"visible\"\r\n }\r\n else {\r\n newCategory.style.visibility = \"hidden\";\r\n }\r\n }\r\n }\r\n\r\n function addToCategory(evt) {\r\n evt.preventDefault();\r\n\r\n console.log(categoryName)\r\n let categoryDetails = {\"label\": categoryName,\r\n \"book\": book}\r\n\r\n fetch(\"/add-book-to-category\", {\r\n method: \"POST\",\r\n credentials: \"include\",\r\n body: JSON.stringify(categoryDetails),\r\n headers: {\r\n 'Content-Type': 'application/json'\r\n },\r\n })\r\n .then (response => response.json())\r\n .then(data => {\r\n if (\"error\" in data) {\r\n alert(data[\"error\"]);\r\n }\r\n else {\r\n alert(data[\"success\"]);\r\n history.push(\"/user/home/browsing\")\r\n }\r\n })\r\n document.getElementById(\"add-to-category\").reset();\r\n setCategoryName(props.userCategories[0].label)\r\n\r\n for (let i = 1; i < 11; i += 1) {\r\n document.forms[i].newCategory.style.visibility = \"hidden\";\r\n \r\n }\r\n }\r\n\r\n \r\n if (props.userLoggedIn.userId) {\r\n return (\r\n <div className=\"book-tile\">\r\n <img src={book.volumeInfo.imageLinks ? book.volumeInfo.imageLinks.thumbnail : \"/static/img/no_book_cover.png\"} alt=\"Book Cover\" />\r\n <h2>{book.volumeInfo.title}</h2>\r\n {book.volumeInfo.authors ? book.volumeInfo.authors.map(author => \r\n (<div>\r\n <h4>{author}</h4>\r\n </div>)) : ''\r\n }\r\n <p>{book.volumeInfo.description}</p>\r\n \r\n <form id=\"add-to-category\" onSubmit={addToCategory} >\r\n <label htmlFor=\"category-add\">\r\n Add to your bookshelf\r\n </label>\r\n <select id=\"category-add\" name=\"chooseCategory\" onChange={(e) => {\r\n {/* <select id={book.key} name=\"chooseCategory\" onChange={(e) => { */}\r\n setCategoryName(e.target.value);\r\n addNewSelect();}} >\r\n {props.userCategories.map(category => \r\n (<option value={category.label} >{category.label} </option>))\r\n }\r\n <option value=\"add-new\">Add New Category</option>\r\n </select>\r\n <input type=\"text\" name=\"newCategory\" id=\"new-category\" style={{visibility: \"hidden\"}} onChange={(e) => setCategoryName(e.target.value)} />\r\n <input type=\"submit\" />\r\n </form>\r\n <hr/>\r\n </div>\r\n )}\r\n \r\n else {\r\n return (\r\n <div className=\"book-tile\">\r\n <img src={book.volumeInfo.imageLinks ? book.volumeInfo.imageLinks.thumbnail : \"/static/img/no_book_cover.png\"} alt=\"Book Cover\" />\r\n <h2>{book.volumeInfo.title}</h2>\r\n {book.volumeInfo.authors ? 
book.volumeInfo.authors.map(author => \r\n (<div>\r\n <h4>{author}</h4>\r\n </div>)) : ''\r\n }\r\n <p>{book.volumeInfo.description}</p>\r\n <Link to=\"/create-account\">Create an account to add a book to your shelf!</Link>\r\n <hr/>\r\n </div>\r\n )\r\n }\r\n}\r\n \r\n\r\n " }, { "alpha_fraction": 0.43567517399787903, "alphanum_fraction": 0.4416058361530304, "avg_line_length": 32.28125, "blob_id": "232f3f6187fcf22db58b3a96c724e83dc333892c", "content_id": "73b4f4167930c6ca81dc2799692cd1e02de79b3d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2192, "license_type": "no_license", "max_line_length": 142, "num_lines": 64, "path": "/static/components/AllEvents.jsx", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"use strict\";\r\n\r\nfunction AllEvents(props) {\r\n const [allEvents, setAllEvents] = React.useState({})\r\n const [currentEvent, setCurrentEvent] = React.useState(null)\r\n let history = useHistory();\r\n\r\n const addAttendee = () => {\r\n \r\n console.log(currentEvent)\r\n \r\n fetch('/add-attendee', {\r\n method: \"POST\",\r\n credentials: \"include\",\r\n body: JSON.stringify({\"event\": currentEvent}),\r\n headers: {\r\n 'Content-Type': 'application/json'\r\n },\r\n })\r\n .then(response => response.json())\r\n .then(data => {\r\n if (\"error\" in data) {\r\n alert(data[\"error\"]);\r\n }\r\n else {\r\n alert(data[\"success\"])\r\n history.push(\"/user-events\")\r\n } \r\n })\r\n}\r\n\r\n React.useEffect(() => {\r\n if (currentEvent) addAttendee();\r\n }, [currentEvent])\r\n\r\n React.useEffect(() => {\r\n fetch(\"/all-events\")\r\n .then(response => response.json())\r\n .then(data => setAllEvents(data))\r\n }, [])\r\n\r\n return (\r\n <div>\r\n <hr/>\r\n <h1>Book Club Meetings</h1>\r\n {allEvents.events ? allEvents.events.map(event => \r\n (<div>\r\n <h2>There is a book club on {event.event_date.slice(0,16)}!</h2>\r\n <h3>Location: {event.city} </h3>\r\n <h3>Hosted By: {event.host.first_name} {event.host.last_name}</h3>\r\n <h3>Attendees</h3>\r\n {event.attending.map(attendee => \r\n (<p value={`${attendee.first_name} ${attendee.last_name}`} >{`${attendee.first_name} ${attendee.last_name}`}</p>))\r\n }\r\n {props.userLoggedIn.userId \r\n ? 
<input type=\"button\" value=\"Attend\" id={event.id} onClick={(e) => {setCurrentEvent(e.target.id)}} />\r\n : <Link to=\"/create-account\">Create an account or log in to attend an event</Link>}\r\n <hr/>\r\n </div>)) : ''\r\n }\r\n </div>\r\n )\r\n\r\n}" }, { "alpha_fraction": 0.4539170563220978, "alphanum_fraction": 0.4539170563220978, "avg_line_length": 32.79999923706055, "blob_id": "d002fa967bb8ff4382eea1d4313e2971b3e4a0c2", "content_id": "3998a6338dc7add118e09d72b85b79b91bc1a8f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 868, "license_type": "no_license", "max_line_length": 76, "num_lines": 25, "path": "/tests.py", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "import unittest\r\nimport server\r\nimport crud\r\n\r\n\r\nclass MyAppUnitTestCase(unittest.TestCase):\r\n\r\n def test_get_user_by_email(self):\r\n # client = server.app.test_client()\r\n # result = crud.get_user_by_email(\"[email protected]\")\r\n # self.assertIn(b\"email='[email protected]'>\", result.data)\r\n # # crud.create_new_user()\r\n \r\n # client = server.app.test_client()\r\n # result = client.post(\"/users\", data={\"first_name\": \"Hunter\",\r\n # \"last_name\": \"Laine\",\r\n # \"email\": \"[email protected]\",\r\n # \"password\": \"test\",\r\n # \"city\": \"San Francisco\",\r\n # \"state\": \"CA\"})\r\n # self.assertIn(b\"\"\"{\"user\": {\"first_name\": Hunter,\"\"\", result.data)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n unittest.main()" }, { "alpha_fraction": 0.5545202493667603, "alphanum_fraction": 0.5545202493667603, "avg_line_length": 37.94117736816406, "blob_id": "f0fe331b89ae896e6929cefa217d289791eedba5", "content_id": "a41c60c282c1a9482a642ebf7f323d31e47ea276", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 3971, "license_type": "no_license", "max_line_length": 119, "num_lines": 102, "path": "/static/components/App.jsx", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"use strict\";\n\nconst Router = ReactRouterDOM.BrowserRouter;\nconst Link = ReactRouterDOM.Link;\nconst Switch = ReactRouterDOM.Switch;\nconst Route = ReactRouterDOM.Route;\nconst useHistory = ReactRouterDOM.useHistory;\nconst useParams = ReactRouterDOM.useParams;\n\nconst Navbar = ReactBootstrap.Navbar;\nconst Nav = ReactBootstrap.Nav;\nconst Form = ReactBootstrap.Form;\nconst FormControl = ReactBootstrap.FormControl;\nconst Button = ReactBootstrap.Button;\nconst Carousel = ReactBootstrap.Carousel;\n\n\nfunction App() {\n\n const [userLoggedIn, setUserLoggedIn] = React.useState({userId: null, userFirstName: null});\n\n const [bookQuery, setBookQuery] = React.useState(null);\n const [userCategories, setUserCategories] = React.useState();\n const [bookshelfCategories, setBookshelfCategories] = React.useState([]);\n const [bookForDetails, setBookForDetails] = React.useState({});\n const [eventForDetails, setEventForDetails] = React.useState({});\n const [newLabel, setNewLabel] = React.useState(null)\n \n\n React.useEffect(() => {\n if (localStorage.getItem(\"userId\") !== \"null\") {\n setUserLoggedIn({userId: localStorage.getItem(\"userId\"), userFirstName: localStorage.getItem(\"userFirstName\")})\n }\n }, [])\n\n return (\n <Router>\n <div>\n <TopNavigation\n bookQuery={bookQuery} \n setBookQuery={setBookQuery} \n userCategories={userCategories} \n setUserCategories={setUserCategories}\n userLoggedIn={userLoggedIn} />\n <Switch>\n <Route path=\"/login\">\n <LogIn 
userLoggedIn={userLoggedIn} setUserLoggedIn={setUserLoggedIn} />\n </Route>\n <Route path=\"/login\">\n <LogIn userLoggedIn={userLoggedIn} setUserLoggedIn={setUserLoggedIn} />\n </Route>\n <Route path=\"/logout\">\n <LogOut userLoggedIn={userLoggedIn} setUserLoggedIn={setUserLoggedIn} />\n </Route>\n <Route path=\"/create-account\">\n <CreateAccount />\n </Route>\n <Route exact path=\"/user/:eventId/:type\">\n <UserPage \n userLoggedIn={userLoggedIn}\n userCategories={userCategories}\n setBookshelfCategories={setBookshelfCategories}\n bookshelfCategories={bookshelfCategories}\n setBookForDetails={setBookForDetails}\n newLabel={newLabel}\n setNewLabel={setNewLabel}\n />\n </Route>\n <Route exact path=\"/update-account-info\">\n <UpdateAccount \n userLoggedIn={userLoggedIn}\n />\n </Route>\n <Route path=\"/book-search\">\n <SearchResults \n bookQuery={bookQuery} \n userLoggedIn={userLoggedIn} \n userCategories={userCategories}\n setUserCategories={setUserCategories} />\n </Route>\n <Route exact path=\"/book-details/:categoryLabel/:eventId\" >\n <BookDetails bookForDetails={bookForDetails} />\n </Route>\n <Route path=\"/create-event\" >\n <CreateEvent userLoggedIn={userLoggedIn} />\n </Route>\n <Route path=\"/user-events\" >\n <UserEvents userLoggedIn={userLoggedIn}\n setBookForDetails={setBookForDetails}\n setEventForDetails={setEventForDetails} />\n </Route>\n <Route path=\"/all-events\" >\n <AllEvents userLoggedIn={userLoggedIn} />\n </Route>\n </Switch>\n </div>\n </Router>\n );\n }\n\n\nReactDOM.render(<App />, document.getElementById(\"root\"));" }, { "alpha_fraction": 0.611600935459137, "alphanum_fraction": 0.6255220174789429, "avg_line_length": 22.172042846679688, "blob_id": "222cdefc57a1a7149f4d5f22ec6f3107a3cdc00a", "content_id": "6fbaa7eaa91b893d72443aaeffc3680e0a003049", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2155, "license_type": "no_license", "max_line_length": 89, "num_lines": 93, "path": "/test_seed.py", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"\"\"Script to seed test database\"\"\"\n\nimport os\nimport json\n\nimport crud\nimport model\nimport server\nfrom random import choice\n\nos.system(\"dropdb testbookworm\")\nos.system(\"createdb testbookworm\")\n\nmodel.connect_to_db(server.app, \"testbookworm\")\nmodel.db.create_all()\n\n\n#create fake books for testing\nnew_books = []\n\nfor n in range(10):\n isbn = f\"testing12345{n}\"\n title = f\"Test Title Book {n}\"\n author = f\"Test Author {n}\"\n description = f\"Test overview for book {n}\"\n page_length = n\n image = f\"Test image path {n}\"\n\n new_book = crud.create_book(isbn, title, author, description, page_length, \n image)\n\n new_books.append(new_book)\n\n\n#create fake users for testing\nnew_users = []\n\nfor n in range(6):\n first_name = f\"Test User First {n}\"\n last_name = f\"Test User Last {n}\"\n email = f\"user{n}@test.com\"\n password = \"test\"\n\n new_user = crud.create_user(first_name, last_name, email, password)\n\n new_users.append(new_user)\n\n\n# Create fake events for testing\nnew_events = []\n\nfor n in range(6):\n\n host_id = n + 1\n city = f\"City {n}\"\n state = f\"S{n}\"\n event_date = f\"2021-02-1{n}\"\n start_time = f\"1{n}:00\"\n end_time = f\"20:1{n}\"\n\n new_event = crud.create_event(host_id, city, event_date, start_time, end_time, state)\n\n new_events.append(new_event)\n\n # Create fake user_events for testing\n for n in range(4): \n random_user = choice(new_users)\n event = new_event\n 
crud.create_event_attendee(random_user.id, event.id)\n\n # Create fake event_books for testing\n random_book = choice(new_books)\n crud.create_event_book(event, random_book) # CHANGE\n\n\n# Create fake categories for testing\nnew_categories = []\n\nfor n in range(6):\n\n user_id = n + 1\n for i in range(10):\n label = f\"Category {i}\"\n\n new_category = crud.create_category(user_id, label)\n\n new_categories.append(new_category)\n\n # Create fake book_categories for testing\n for n in range(1,6):\n random_book = choice(new_books)\n category = new_category\n book_categories = crud.create_book_category(random_book, category)\n" }, { "alpha_fraction": 0.6014670133590698, "alphanum_fraction": 0.6039119958877563, "avg_line_length": 22.117647171020508, "blob_id": "02dbb4d154799ec2701e9810f38fec7ab074230e", "content_id": "95c9d959c93b0010edc24b9b6a67ffabb819a828", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 409, "license_type": "no_license", "max_line_length": 106, "num_lines": 17, "path": "/static/components/Book.jsx", "repo_name": "nancydyc/bookworm", "src_encoding": "UTF-8", "text": "\"use strict\";\r\n\r\nfunction Book(props) {\r\n\r\n let history = useHistory();\r\n\r\n const goToDetails = () => {\r\n props.setBookForDetails(props.book)\r\n history.push(`/book-details/${props.categoryLabel}/${props.eventId}`)\r\n }\r\n\r\n return (\r\n <img src={props.book.image} className=\"book-cover\" alt={props.book.title} onClick={goToDetails} />\r\n )\r\n}\r\n\r\nclassName=\"d-inline-block w-5\" " } ]
12
xiaomojie/NowCoder
https://github.com/xiaomojie/NowCoder
7e5504f4ffb3e42aee8f0a47749c438ee45a5f4f
e37814ad0b4e2061b9afbfb26d4502acab503b4f
907d550e9f772685c80a0e3365793221494717f1
refs/heads/master
2020-04-24T09:32:42.648538
2019-10-23T10:40:18
2019-10-23T10:40:18
171,863,202
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.25440001487731934, "alphanum_fraction": 0.29760000109672546, "avg_line_length": 28.809524536132812, "blob_id": "89ddbb68fca4a565963e3ebf30baeb2106f15fea", "content_id": "c9fecce42f2036b995daef1cb29a73d6704b8f74", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 625, "license_type": "no_license", "max_line_length": 88, "num_lines": 21, "path": "/面试与笔试/笔试/iqiyi/4.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n int f(string S) {\n int N = S.length() + 1;\n int MOD = 1e9 + 7;\n int dp[202][202] = {};\n dp[1][1] = 1;\n for (int i = 2; i <= N; ++i) {\n // length is i\n for (int j = 1; j <= i; ++j) {\n // end with j\n if (S[i - 2] == 1) {\n dp[i][j] = (dp[i][j-1] + (dp[i-1][i-1] - dp[i-1][j-1]) % MOD) % MOD;\n } else {\n dp[i][j] = (dp[i][j-1] + (dp[i-1][j-1] - dp[i-1][0]) % MOD) % MOD;\n }\n }\n }\n return (dp[N][N] + MOD) % MOD;\n }\n};" }, { "alpha_fraction": 0.5302926898002625, "alphanum_fraction": 0.5479918122291565, "avg_line_length": 30.276596069335938, "blob_id": "894ab9ed82de5ff6f1ae73d1863edb2e6943d5f7", "content_id": "bd16ae5cba6d9aa2644de2c0534ed92730a4e15c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2213, "license_type": "no_license", "max_line_length": 66, "num_lines": 47, "path": "/offer/11.旋转数组的最小数字.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n把一个数组最开始的若干个元素搬到数组的末尾,我们称之为数组的旋转。 输入一个非减排序的\n数组的一个旋转,输出旋转数组的最小元素。 例如数组{3,4,5,1,2}为{1,2,3,4,5}的一个旋转,\n该数组的最小值为1。 NOTE:给出的所有元素都大于0,若数组大小为0,请返回0。\n\"\"\"\nclass Solution:\n def minNumberInRotateArray(self, rotateArray):\n # 本体的数组在一定程度上是有序的,可以考虑使用二分查找,但是特别要注意一些\n # 特例情况,设置头指针s和尾指针e:\n # 1. 如果中间元素m位于前半段,则它应该大于等于第一个元素,将s指向m\n # 2. 如果中间元素m位于后半段,则它应该小于等于最后一个元素,将e指向m\n # 按照上面思路,s总是指向前面递增数组的元素,e总是指向后面递增数组的元素,\n # 最终他们会指向相邻的元素,此时e指向的则为最小。\n # 特例:\n # a. 把排序数组的前面0个元素放到后面,此时最小为第一个\n # b. 
数组中存在相同的元素,当s,e,m相等的时候,就无法判断最小值到底是在\n # 前半段还是后半段了,此时不得不采用顺序查找的方法\n # 顺序查找O(n),二分查找O(logn)\n if len(rotateArray) == 0:\n return 0\n s, e = 0, len(rotateArray) - 1\n m = s # 为了特例a\n while rotateArray[s] >= rotateArray[e]:\n if e - s == 1:\n m = e\n break\n m = (s + e) // 2\n if rotateArray[m] == rotateArray[s] == rotateArray[e]:\n return self.MinInOrder(rotateArray, s, e)\n if rotateArray[m] >= rotateArray[s]:\n s = m\n elif rotateArray[m] <= rotateArray[e]:\n e = m\n return rotateArray[m]\n\n def MinInOrder(self, rotateArray, s, e):\n res = 0\n while s <= e:\n if res > rotateArray[s]:\n res = rotateArray[s]\n s += 1\n return res\n\n\nprint(Solution().minNumberInRotateArray([]))" }, { "alpha_fraction": 0.3703382611274719, "alphanum_fraction": 0.40893322229385376, "avg_line_length": 27.837499618530273, "blob_id": "419eae307cd8d4997b7901e37de2ffea978cbe13", "content_id": "1a3765689e9c25b34bbbbbb42f012ee69ec14805", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2306, "license_type": "no_license", "max_line_length": 74, "num_lines": 80, "path": "/面试与笔试/笔试/tt/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\ndir = int(input().strip())\nmatrix = []\nfor i in range(4):\n matrix.append(list(map(int, sys.stdin.readline().strip().split(' '))))\n\nif dir == 1:\n for i in range(0, 3):\n for j in range(0, 4):\n if matrix[i][j] == matrix[i+1][j]:\n matrix[i][j] *= 2\n matrix[i + 1][j] = 0\n for j in range(4):\n column = [0] * 4\n index = 0\n for i in range(0, 4):\n if matrix[i][j] != 0:\n column[index] = matrix[i][j]\n index += 1\n for i in range(0, 4):\n matrix[i][j] = column[i]\n for i in range(4):\n print(' '.join([str(x) for x in matrix[i]]))\n\nif dir == 2:\n for i in range(3, 0, -1):\n for j in range(0, 4):\n if matrix[i][j] == matrix[i-1][j]:\n matrix[i][j] *= 2\n matrix[i - 1][j] = 0\n\n for j in range(4):\n column = [0] * 4\n index = 0\n for i in range(3, -1, -1):\n if matrix[i][j] != 0:\n column[index] = matrix[i][j]\n index += 1\n for i in range(3, -1, -1):\n matrix[i][j] = column[3-i]\n for i in range(4):\n print(' '.join([str(x) for x in matrix[i]]))\n\nif dir == 3:\n for i in range(0, 3):\n for j in range(0, 4):\n if matrix[j][i] == matrix[j][i+1]:\n matrix[j][i] *= 2\n matrix[j][i+1] = 0\n\n for j in range(4):\n column = [0] * 4\n index = 0\n for i in range(0, 4):\n if matrix[j][i] != 0:\n column[index] = matrix[j][i]\n index += 1\n for i in range(0, 4):\n matrix[j][i] = column[i]\n for i in range(4):\n print(' '.join([str(x) for x in matrix[i]]))\n\nif dir == 4:\n for i in range(3, 0, -1):\n for j in range(0, 4):\n if matrix[j][i] == matrix[j][i-1]:\n matrix[j][i] *= 2\n matrix[j][i-1] = 0\n\n for j in range(4):\n column = [0] * 4\n index = 0\n for i in range(3, -1, -1):\n if matrix[j][i] != 0:\n column[index] = matrix[j][i]\n index += 1\n for i in range(3, -1, -1):\n matrix[j][i] = column[3-i]\n for i in range(4):\n print(' '.join([str(x) for x in matrix[i]]))" }, { "alpha_fraction": 0.42290249466896057, "alphanum_fraction": 0.4433106482028961, "avg_line_length": 20.0238094329834, "blob_id": "3c977f22f381a82ef48a100ef8b9ff93b5508fad", "content_id": "db0334b391b3b6e665dbb60e1c2cc9529f235099", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 932, "license_type": "no_license", "max_line_length": 67, "num_lines": 42, "path": "/面试与笔试/笔试/ks/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "// 
本题为考试多行输入输出规范示例,无需提交,不计分。\n#include <iostream>\n#include <cstdio>\n#include <vector>\n#include <string>\n\nusing namespace std;\n\nint main(){\n //freopen(\"1.in\",\"r\",stdin);\n int N;\n cin >> N;\n int dis[N];\n int wei[N];\n for(int i=0;i<N;i++){\n scanf(\"%d\",&dis[i]);\n }\n for(int i=0;i<N;i++){\n scanf(\"%d\",&wei[i]);\n }\n int maxdis = 0;\n int res=0;\n int maxid,maxwei,len;\n int mark[N]={0};\n for(int i=0;i<N;i++){\n maxid = 0;\n maxwei = 0;\n len = 0;\n for(int j=0;j<N;j++){\n if(mark[j]==0&&maxwei<(wei[j]+2*max(0,dis[j]-maxdis))){\n maxid =j;\n maxwei = (wei[j]+2*max(0,dis[j]-maxdis));\n len = dis[j];\n }\n }\n res = res + maxwei;\n mark[maxid] =1;\n maxdis = max(maxdis,len);\n cout<<res<<endl;\n }\n return 0;\n}" }, { "alpha_fraction": 0.3708029091358185, "alphanum_fraction": 0.4058394134044647, "avg_line_length": 31.619047164916992, "blob_id": "efb49100140a3be1aaaad4d565f2011d691f6e37", "content_id": "1cad8c6e379ec661e27f9d016de197ce74bf61da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 743, "license_type": "no_license", "max_line_length": 60, "num_lines": 21, "path": "/最长公共(子)/最长公共子序列.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n求LCS (Longest Common Subsequence)\n动态规划:\n用二维数组dp[i][j]记录串x1 x2 ...xi 和 y1 y2 ... yj的LCS长度,则可得到状态转移方程\n 0 i = 0 or j\ndp[i][j] = dp[i-1][j-1] xi = yj\n max(dp[i][j-1], dp[i-1][j]) xi != yj\n\"\"\"\n\nclass Solution:\n def LSC(self, x, y):\n dp = [[0] * (len(y)+1) for i in (range(len(x) + 1))]\n for i in range(1, len(x) + 1):\n for j in range(1, len(y) + 1):\n if x[i-1] == y[j-1]:\n dp[i][j] = dp[i-1][j-1] + 1\n else:\n dp[i][j] = max(dp[i-1][j], dp[i][j-1])\n return dp[len(x)][len(y)]\n\nprint(Solution().LSC('ABCBDAB', 'BDCABA'))\n" }, { "alpha_fraction": 0.45266273617744446, "alphanum_fraction": 0.4792899489402771, "avg_line_length": 23.581817626953125, "blob_id": "b1d2a1f4763aa991d4537a63016d7a4bb1685892", "content_id": "44282d4423a9b5617077a9fc8729186386ca00d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1352, "license_type": "no_license", "max_line_length": 74, "num_lines": 55, "path": "/面试与笔试/笔试/tt/4.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\ndef yinshu(x):\n fac = [x]\n for i in range(2, x):\n if x % i == 0:\n fac.append(i)\n continue\n else:\n pass\n # if len(fac) == 0:\n # return []\n # else:\n return fac\n\n# print(yinshu(2))\n# print(yinshu(3))\n# print(yinshu(4))\n# print(yinshu(6))\n# print(yinshu(8))\n# print(yinshu(9))\n\nn = int(input().strip())\nnum = list(map(int, sys.stdin.readline().strip().split(' ')))\nfac = []\nfor x in num:\n fac.append(yinshu(x))\n\nprint(fac)\nres = [[1, list(set(fac[0]))]]\nfor i in range(1, len(fac)):\n for j in range(len(res)):\n if len(list(set(res[j][1]).intersection(set(fac[i])))):\n # print(res[j][1])\n # print(set(res[j][1]).union(set(fac[i])))\n print(list(set(res[j][1]).union(set(fac[i]))))\n res[j][1] = list(set(res[j][1]).union(set(fac[i])))\n res[j][0] += 1\n print(fac)\n else:\n res.append((1, set(fac[i])))\nprint(res)\n\nfor i in range(len(res)):\n for j in range(i+1, len(res)):\n if len(res[i][1]) and set(res[i][1]).intersection(set(res[j][1])):\n res[i][1] = list(set(res[i][1]).union(set(res[j][1])))\n res[i][0] += 1\n res[j][0] = 0\n res[j][1] = []\n\ncount = 0\nfor i in range(len(res)):\n if res[i][0]:\n count += 1\nprint(count)\n" }, { "alpha_fraction": 0.5653568506240845, 
"alphanum_fraction": 0.597433865070343, "avg_line_length": 18.5, "blob_id": "5991b2af8919b735b4da6425a8b0921e72df8839", "content_id": "c7aab01dee56658ffb8d5ef53b2f1a441d705089", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1269, "license_type": "no_license", "max_line_length": 57, "num_lines": 64, "path": "/offer/36.二叉搜索树与双向链表.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\nclass TreeNode:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\n\nclass Solution:\n def Convert(self, pRootOfTree):\n # write code here\n if not pRootOfTree:\n return None\n head = cur = TreeNode(0)\n self.ConvertTreeToList(pRootOfTree, cur)\n head = head.right\n head.left = None\n return head\n\n def ConvertTreeToList(self, root, cur):\n\n if root.left:\n cur = self.ConvertTreeToList(root.left, cur)\n cur.right = root\n root.left = cur\n cur = root\n if root.right:\n cur = self.ConvertTreeToList(root.right, cur)\n return cur\n\n\n# {10,6,14,4,8,12,16}\n\nroot = TreeNode(10)\nleft1 = TreeNode(6)\nright1 = TreeNode(14)\nroot.left = left1\nroot.right = right1\n\nleft2 = TreeNode(4)\nright2 = TreeNode(8)\nleft1.left = left2\nleft1.right = right2\n\nright1.left = TreeNode(12)\nright1.right = TreeNode(16)\n# cur = root\n# while cur:\n# print(cur.val)\n# cur = cur.right\n\nhead = Solution().Convert(root)\ncur = head\n# print(head)\nwhile cur.right:\n print(cur.val)\n cur = cur.right\n\nprint()\nwhile cur:\n print(cur.val)\n cur = cur.left\n\n# 调试的时候10的right还是14的left不对" }, { "alpha_fraction": 0.5610412955284119, "alphanum_fraction": 0.5798922777175903, "avg_line_length": 24.930233001708984, "blob_id": "7da72cc922dffded1146843f8fc55cc87bcebbff", "content_id": "9a0c9be33074b1277620a37ba236525bf71dd7bc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1162, "license_type": "no_license", "max_line_length": 102, "num_lines": 43, "path": "/二叉树的遍历/后续遍历.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class BiTree:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\nclass SolutionRecursion:\n def postorder_traversal(self, root):\n if not root:\n return []\n return self.postorder_traversal(root.left) + self.postorder_traversal(root.right) + [root.val]\n\n\nclass SolutionCirculation:\n def postorder_traversal(self, root):\n # 左右中,其逆序为中右左,先求中右左在求数组的逆序\n res = []\n if not root:\n return res\n stack = []\n p = root\n while p or len(stack):\n if p:\n res.append(p.val)\n stack.append(p)\n p = p.right\n else:\n p = stack.pop().left\n return res[::-1]\nroot = BiTree(45)\na = BiTree(12)\nb = BiTree(53)\nroot.left = a\nroot.right = b\na.left = BiTree(3)\na.right = BiTree(37)\na.right.left = BiTree(24)\nb.right = BiTree(100)\nb.right.left = BiTree(61)\nb.right.left.right = BiTree(90)\nb.right.left.right.left = BiTree(78)\nprint(SolutionRecursion().postorder_traversal(root))\nprint(SolutionCirculation().postorder_traversal(root))" }, { "alpha_fraction": 0.4278523623943329, "alphanum_fraction": 0.43959730863571167, "avg_line_length": 22.8799991607666, "blob_id": "5f8d5629fe08f0f6ccade59dc520c91855e4bfeb", "content_id": "b49ae2679a31d3938bd6d6bb692788682ee60062", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 596, "license_type": "no_license", "max_line_length": 57, "num_lines": 25, "path": "/面试与笔试/笔试/tx2/4.py", "repo_name": 
"xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(input())\nT = sys.stdin.readline().strip()\nm = int(input())\ns = []\nfor i in range(m):\n s.append(sys.stdin.readline().strip())\n\ncount = 0\nfor i in range(m):\n diff = (len(T) % len(s[i]))\n if diff != 0 and T[-diff:-1] not in s[i]:\n continue\n elif s[i] != T[:len(s[i])]:\n continue\n else:\n j = 0\n while j + len(s[i]) - 1 < len(T):\n if T[j:j+len(s[i])] != s[i]:\n break\n j += len(s[i])\n if j + len(s[i]) - 1 >= len(T) and T[j:] in s[i]:\n count += 1\n # print(s[i])\nprint(count)" }, { "alpha_fraction": 0.4958263635635376, "alphanum_fraction": 0.5125208497047424, "avg_line_length": 23, "blob_id": "7441c758972f516ccd2b606129108235dc94af91", "content_id": "c47e84215a7222ff2460ca8f505ac41220fe18a1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 599, "license_type": "no_license", "max_line_length": 62, "num_lines": 25, "path": "/面试与笔试/笔试/360-2/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nline = list(map(int, sys.stdin.readline().strip().split(\" \")))\nN = line[0]\nM = line[1]\nline = list(map(int, sys.stdin.readline().strip().split(\" \")))\n\n\ndef maxSubArray(nums, M):\n if not nums:\n return 0\n maxSum = current = nums[0]\n count = 1\n for n in nums[1:]:\n if (current + n) / float(count + 1) > n:\n current = current + n\n count += 1\n if count > M:\n maxSum = max(maxSum, current / float(count))\n else:\n current = n\n count = 1\n return maxSum\n\nprint(\"%.3f\" %maxSubArray(line, M))" }, { "alpha_fraction": 0.483146071434021, "alphanum_fraction": 0.4988763928413391, "avg_line_length": 17.54166603088379, "blob_id": "1d92c82cafe8a44bdde7649b3db7e2738ac35764", "content_id": "564a969cdca0c61b90bdd13585ed8c129b6fa153", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 890, "license_type": "no_license", "max_line_length": 56, "num_lines": 48, "path": "/面试与笔试/笔试/pdd/test1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\na = sys.stdin.readline().strip().split()\nindex = 0\n\nfor i in range(1, len(a)):\n if int(a[i]) < int(a[index]):\n break\n index += 1\n\nsegs = []\n\n\nif index == 0:\n segs.append([])\n segs.append(a[index+1:])\n\n left = int(a[-1])\n right = float(\"+inf\")\n\nelse:\n left = int(a[index])\n if index + 2 < len(a):\n right = int(a[index + 2])\n else:\n right = float(\"+inf\")\n\n segs.append(a[:index + 1])\n segs.append(a[index + 2:])\n\nprint(left, right)\nprint(segs)\n\n\nb = sys.stdin.readline().strip().split()\nmax_ = float(\"-inf\")\nfor i in range(len(b)):\n if int(b[i]) > right:\n break\n if left <= int(b[i]) <= right:\n max_ =max(max_, int(b[i]))\n\nif max_ == float(\"-inf\"):\n print(\"NO\")\nelse:\n if segs[0] == []:\n print(' '.join(segs[1] + [str(max_)]))\n else:\n print(' '.join(segs[0] + [str(max_)] + segs[1]))\n" }, { "alpha_fraction": 0.5566893219947815, "alphanum_fraction": 0.5725623369216919, "avg_line_length": 21.615385055541992, "blob_id": "28f3c04a58c75c47b71f655f46098f15dc5628cc", "content_id": "2ab131a9b90c1523e16a8ae0af82bd7481147727", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1154, "license_type": "no_license", "max_line_length": 60, "num_lines": 39, "path": "/offer/22.链表中倒数第k个结点.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# 方法1.把链表反转,然后在从后往前,但是改变了链表的结构\n# 
方法2.先遍历一遍链表,统计出一个有多少节点n,然后倒数第k个节点就是从头开始数第n-k+1个节点,再遍历一遍就行了\n# 方法3.设置两个变量,left和right,中间相差k,当right到达末尾时,left到达倒数第k个\n# 要考虑代码的鲁棒性:a.输入的为空指针 b.输入的链表总节点数小于k c.输入的k为0\n\n\n# -*- coding:utf-8 -*-\nclass ListNode:\n def __init__(self, x):\n self.val = x\n self.next = None\n\n\nclass Solution:\n def FindKthToTail(self, head, k):\n # write code here\n if head is None or k < 1:\n return None\n slow = fast = head\n for i in range(k - 1):\n if not fast.next:\n return None\n else:\n fast = fast.next\n while fast.next:\n fast = fast.next\n slow = slow.next\n return slow\n\na = ListNode(1)\nb = ListNode(2)\nc = ListNode(3)\nd = ListNode(4)\ne = ListNode(5)\na.next = b\nb.next = c\nc.next = d\nd.next = e\nprint(Solution().FindKthToTail(a, 1))\n" }, { "alpha_fraction": 0.3762102425098419, "alphanum_fraction": 0.3831258714199066, "avg_line_length": 29.04166603088379, "blob_id": "7b9a7d1a3beeaafcf639c9a18d5e3a04b0c05d59", "content_id": "30c47ef888c1c1576c69671764792c302719cc4a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 813, "license_type": "no_license", "max_line_length": 53, "num_lines": 24, "path": "/offer/31.栈的压入、弹出序列.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\nclass Solution:\n def __init__(self):\n self.stack = []\n\n def IsPopOrder(self, pushV, popV):\n # 遍历popV中的每一个元素,如果该元素与当前栈顶的元素相同,则弹出,如果不相同,\n # 则pushv中的元素继续入栈\n for k in popV:\n if not self.stack or self.stack[-1] != k:\n i = 0\n while i < len(pushV):\n if pushV[i] != k:\n self.stack.append(pushV[i])\n i += 1\n else:\n break\n if i == len(pushV):\n return False\n else:\n pushV = pushV[i + 1:]\n else:\n self.stack.pop()\n return True\n\n\n" }, { "alpha_fraction": 0.4403534531593323, "alphanum_fraction": 0.4624447822570801, "avg_line_length": 24.148147583007812, "blob_id": "01128d5d9f52122a0cedf2bb521463a0ea17deaa", "content_id": "9d71afca30b86f40e75ecaa542c55a8a9bf41e9b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 819, "license_type": "no_license", "max_line_length": 47, "num_lines": 27, "path": "/offer/5.替换空格.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n题目描述\n请实现一个函数,将一个字符串中的每个空格替换成“%20”。例如,\n当字符串为We Are Happy.则经过替换之后的字符串为We%20Are%20Happy。\n\"\"\"\n# -*- coding:utf-8 -*-\nclass Solution:\n # s 源字符串\n def replaceSpace1(self, s):\n # 法一:python 内置函数\n return s.replace(\" \", \"%20\")\n def replaceSpace2(self, s):\n # 法二:利用python list\n list_s = list(s)\n for i in range(len(list_s)):\n if list_s[i] == ' ':\n list_s[i] = '%20'\n return ''.join(list_s)\n def replaceSpace(self, s):\n # 法三:list\n res = ''\n for i in range(len(s)):\n if s[i] == ' ':\n res += '%20'\n else:\n res += s[i]\n return res\n" }, { "alpha_fraction": 0.4848484992980957, "alphanum_fraction": 0.5, "avg_line_length": 24.30769157409668, "blob_id": "44e5e184298c11a849a5ed5f3257ff6e02e68b88", "content_id": "3e33d037d21de9978b0f70062411c87537d679df", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 330, "license_type": "no_license", "max_line_length": 66, "num_lines": 13, "path": "/面试与笔试/笔试/wy2/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nT = int(input().strip())\nfor i in range(T):\n line = list(map(int, sys.stdin.readline().strip().split(\" \")))\n n = line[0]\n m = line[1]\n h = list(map(int,sys.stdin.readline().strip().split(\" \")))\n total = (0 + n - 1) * n // 2\n if 
sum(h) + m < total:\n print(\"NO\")\n else:\n print(\"YES\")\n\n" }, { "alpha_fraction": 0.4627249240875244, "alphanum_fraction": 0.5089974403381348, "avg_line_length": 24.129032135009766, "blob_id": "bc1f7fdf0026138fa306e70bb3a80e57051d201b", "content_id": "97033bbbaa9992f1d9496b4fe36e7cf0c0933af9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 834, "license_type": "no_license", "max_line_length": 47, "num_lines": 31, "path": "/排序/直接插入排序.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# time: O(n^2)\n# 最好比较次数:n-1\n# 最好移动次数:0\n# 最坏比较次数:(n+2)(n-1)/2\n# 最坏移动次数:(n+4)(n-1)/2\nclass Solution:\n def insertSort(self, nums):\n if not nums:\n return nums\n for i in range(1, len(nums)):\n if nums[i-1] > nums[i]:\n tmp = nums[i]\n j = i - 1\n while j >= 0 and nums[j] > tmp:\n nums[j+1] = nums[j]\n j -= 1\n nums[j+1] = tmp\n return nums\n\nnums = []\nprint(Solution().insertSort(nums))\nnums = [1]\nprint(Solution().insertSort(nums))\nnums = [1,2,3]\nprint(Solution().insertSort(nums))\nnums = [4,3,2,1]\nprint(Solution().insertSort(nums))\nnums = [3,1,2,4,3,0]\nprint(Solution().insertSort(nums))\nnums = [1,4,5,6,3,4]\nprint(Solution().insertSort(nums))" }, { "alpha_fraction": 0.4444444477558136, "alphanum_fraction": 0.45679011940956116, "avg_line_length": 15.300000190734863, "blob_id": "8242ab3d9fca5c2bc59a992715f174cdf944f180", "content_id": "989961567641c0ad8e6f8849a2080f20203006f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 162, "license_type": "no_license", "max_line_length": 20, "num_lines": 10, "path": "/面试与笔试/笔试/hw/11.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "s = input()\ns_list = s.split()\ns_min = s_list[-1]\ns_new = \"\"\nfor i in s_list[0]:\n if i in s_min:\n s_new += \"*\"\n else:\n s_new += i\nprint(s_new)" }, { "alpha_fraction": 0.4548286497592926, "alphanum_fraction": 0.4832121729850769, "avg_line_length": 27.889999389648438, "blob_id": "6c82c113a0ad9c90e0ad33a4bee20beef40d1a59", "content_id": "61543fba5bdc01eb8b4e84b6ce64d496fb996125", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3111, "license_type": "no_license", "max_line_length": 65, "num_lines": 100, "path": "/offer/39.数组中出现次数超过一半的数字.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\nimport collections\nclass Solution:\n def MoreThanHalfNum_Solution2(self, numbers):\n # 法一:使用Partition\n if not numbers:\n return False\n mid = len(numbers) // 2\n low = 0\n high = len(numbers) - 1\n index = self.partition(numbers, low, high)\n while index != mid:\n if index > mid:\n high = index - 1\n index = self.partition(numbers, low, high)\n else:\n low = index + 1\n index = self.partition(numbers, low, high)\n\n # 判断是否超过了一半\n res = numbers[mid]\n i = 0\n count = 0\n while i < len(numbers):\n if numbers[i] == res:\n count += 1\n i += 1\n if count * 2 <= len(numbers):\n return 0\n return res\n\n def partition(self, number, low, high):\n pivot = number[low]\n while low < high:\n while low < high and number[high] > pivot:\n high -= 1\n number[low] = number[high]\n while low < high and number[low] <= pivot:\n low += 1\n number[high] = number[low]\n # print(number)\n number[low] = pivot\n return low\n\n def MoreThanHalfNum_Solution(self, numbers):\n # 法二:根据数组特点,存储数字和其出现的次数,遍历,如果下一个出现的与当前保存的\n # 数字相同,则次数加一,如果不相同则次数减一,如果为0则需要保存下一个数字\n if not numbers:\n return 0\n key = 
numbers[0]\n num = 1\n for i in range(1,len(numbers)):\n if num == 0:\n key = numbers[i]\n num = 1\n elif numbers[i] == key:\n num += 1\n else:\n num -= 1\n # check whether the candidate really appears more than half of the time\n i = 0\n count = 0\n while i < len(numbers):\n if numbers[i] == key:\n count += 1\n i += 1\n if count * 2 > len(numbers):\n return key\n else:\n return 0\n\n def MoreThanHalfNum_Solution1(self, numbers):\n # Method 3: use collections.Counter\n c = collections.Counter(numbers)\n for k, v in c.items():\n if v > len(numbers) / 2:\n return k\n return 0\n\n def MoreThanHalfNum_Solution3(self, numbers):\n # Method 4: use a dict to count occurrences\n d = {}\n for item in numbers:\n if item not in d.keys():\n d[item] = 1\n else:\n d[item] += 1\n if d[item] > len(numbers)/2:\n return item\n return 0\n\n\n\n\n# print(Solution().MoreThanHalfNum_Solution([1,2,3,2,4,2,5,2,3]))\n# print(Solution().MoreThanHalfNum_Solution([1,2,3,2,4,2,5,2,3]))\n# print(Solution().MoreThanHalfNum_Solution([1,1,1,1]))\n# print(Solution().MoreThanHalfNum_Solution([1,2,3,4]))\nprint(Solution().MoreThanHalfNum_Solution([1,2,3,2,2,2,5,4,2]))\nprint(Solution().MoreThanHalfNum_Solution([1,2,3,2,4,2,5,2,3]))\n" }, { "alpha_fraction": 0.5406386256217957, "alphanum_fraction": 0.5515239238739014, "avg_line_length": 20.966102600097656, "blob_id": "bf151452aa692d06939a0e6798490ddf6f92d72b", "content_id": "4a5a9a273e57322f1efaff678352ddeb646bae20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1456, "license_type": "no_license", "max_line_length": 92, "num_lines": 59, "path": "/offer/37.序列化二叉树.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\nProblem description\nImplement two functions to serialize and deserialize a binary tree, respectively.\n\"\"\"\n\nclass TreeNode:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\n\"\"\"\nUse preorder traversal (level-order traversal would not work here); empty nodes must be recorded as null, otherwise deserialization is hard to handle.\n\"\"\"\nclass Solution:\n def Serialize(self, root):\n # write code here\n if not root:\n return [None]\n return [root.val] + self.Serialize(root.left) + self.Serialize(root.right)\n\n # Method 1: build the tree recursively; the sequence s is consumed by slicing off the used prefix\n def Deserialize(self, s):\n # write code here\n if not s:\n return None\n\n root, s = self.Deserialize_(s)\n return root\n\n def Deserialize_(self, s):\n if not s[0]:\n return None, s[1:]\n\n root = TreeNode(s[0])\n s = s[1:]\n root.left, s = self.Deserialize_(s)\n root.right, s = self.Deserialize_(s)\n\n return root, s\n\n # Method 2: keep an index variable that records the current read position in s\n index = 0\n def Deserialize1(self, s):\n if not s:\n return 
None\n return self.Deserialize_(s)\n\n def Deserialize_1(self, s):\n if not s[self.index]:\n self.index += 1\n return None\n\n root = TreeNode(s[self.index])\n self.index += 1\n\n root.left = self.Deserialize_(s)\n root.right= self.Deserialize_(s)\n\n return root\n\nprint(Solution().Deserialize([1,2,None,None,3,None,None]))" }, { "alpha_fraction": 0.5387722253799438, "alphanum_fraction": 0.5476574897766113, "avg_line_length": 19.966102600097656, "blob_id": "bf151452aa692d06939a0e6798490ddf6f92d72b", "content_id": "4a5a9a273e57322f1efaff678352ddeb646bae20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1456, "license_type": "no_license", "max_line_length": 92, "num_lines": 59, "path": "/offer/30.包含min函数的栈.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n定义栈的数据结构,请在该类型中实现一个能够得到栈中所含最小元素的min函数(时间复杂度应为O(1))。\n\"\"\"\nclass Solution:\n # 使用两个栈,一个栈是正常的,另一个用来保存最小值,每一次push的时候,判断push的值与当前stack_min的最后一个值谁更小,\n # 把更小的当做当前最小值\n def __init__(self):\n self.stack = []\n self.stack_min = []\n\n def push(self, node):\n # write code here\n self.stack.append(node)\n if not self.stack_min:\n self.stack_min.append(node)\n else:\n self.stack_min.append(node if node < self.stack_min[-1] else self.stack_min[-1])\n\n def pop(self):\n # write code here\n if self.stack is []:\n return None\n else:\n self.stack_min.pop()\n return self.stack.pop()\n\n def top(self):\n # write code here\n if self.stack:\n return self.stack[-1]\n else:\n return None\n\n def min(self):\n # write code here\n if self.stack_min:\n return self.stack_min[-1]\n else:\n return None\n\ns = Solution()\ns.push(3)\nprint(s.min())\ns.push(4)\nprint(s.min())\ns.push(2)\nprint(s.min())\ns.push(3)\nprint(s.min())\nprint(s.pop())\nprint(s.min())\nprint(s.pop())\nprint(s.min())\nprint(s.pop())\nprint(s.min())\ns.push(0)\nprint(s.min())\n\n" }, { "alpha_fraction": 0.5033783912658691, "alphanum_fraction": 0.5320945978164673, "avg_line_length": 23.70833396911621, "blob_id": "e60d3049d1bb67674254d36c6098a63ec76f7a1f", "content_id": "7b7448c5e4409b8411cac05ef72fcf7a4e1dd908", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 760, "license_type": "no_license", "max_line_length": 61, "num_lines": 24, "path": "/offer/62.圆圈中最后剩下的数字.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n0,1,2...n-1这n个数排成一个圆圈,从数字0开始,每次从这个圆圈中删除第m个数字,求出这个圆圈理剩下的最后一个数字\n\"\"\"\n'''\n思路:使用一个list来模拟一个圆圈,每次删去一个,然后再按正确的顺序拼接一下\n'''\nclass Solution:\n def LastRemaining_Solution(self, n, m):\n # write code here\n if n < 1 or m < 1:\n return -1\n queue = []\n for i in range(n):\n queue.append(i)\n\n cur = 0\n while len(queue) > 1:\n remove = (cur + m - 1) % len(queue)\n queue = queue[remove + 1:] + queue[:remove]\n cur = 0\n return queue[0]\n\nprint(Solution().LastRemaining_Solution(5, 3))" }, { "alpha_fraction": 0.5042253732681274, "alphanum_fraction": 0.5183098316192627, "avg_line_length": 21.1875, "blob_id": "ebfbede85a6d9de8e1ca77a175418aae8e828f34", "content_id": "5d756f821cf218071b6e43e2e67bc2a5bed1a426", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 355, "license_type": "no_license", "max_line_length": 62, "num_lines": 16, "path": "/面试与笔试/笔试/wy2/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\ndef f(data):\n if len(data) <= 1:\n return 0\n count = 0\n length = 
len(data)\n for i in range(length-1):\n for j in range(i+1, length):\n if data[i] > data[j]:\n count += (j-i)\n return count\n\nT = int(input().strip())\ndata = list(map(int, sys.stdin.readline().strip().split(\" \")))\nprint(f(data))\n" }, { "alpha_fraction": 0.4161290228366852, "alphanum_fraction": 0.44301074743270874, "avg_line_length": 24.80555534362793, "blob_id": "f314e113970ab893452a44ee8f4d829ea42917c2", "content_id": "98c3ea665b0a30307d617b95ace9071c24f07baa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1038, "license_type": "no_license", "max_line_length": 58, "num_lines": 36, "path": "/offer/60.n个骰子的点数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\n\"\"\"\n把n个骰子仍在地上,所有骰子朝上一面的点数只和为s,输入n,打印出s的所有可能的值出现的概率\n\"\"\"\nimport math\nMAX_VALUE = 6\nclass Solution:\n def propobility(self, n):\n if n < 1:\n return\n prob = [[0]*(MAX_VALUE * n + 1) for i in range(2)]\n flag = 0\n for i in range(1, MAX_VALUE + 1):\n prob[flag][i] = 1\n\n for k in range(2, n + 1):\n for i in range(0, k):\n prob[1 - flag][i] = 0\n\n for i in range(k, MAX_VALUE * k + 1):\n prob[1 - flag][i] = 0\n j = 1\n while j < i and j <= MAX_VALUE:\n prob[1-flag][i] += prob[flag][i-j]\n j += 1\n\n flag = 1 - flag\n\n # 最后的值就存在prob[flag]中,求概率\n # total = math.pow(6, n)\n # for i in range(MAX_VALUE * n + 1):\n # prob[flag][i] /= float(total)\n return prob[flag]\n\nprint(Solution().propobility(2))\n\n" }, { "alpha_fraction": 0.5742705464363098, "alphanum_fraction": 0.6034482717514038, "avg_line_length": 19.94444465637207, "blob_id": "fbfc42f6af66b2a1c07d6a1250d1d3c2506f8476", "content_id": "8040ef7dec8ca691336cb5048d0bc06adffbee71", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 754, "license_type": "no_license", "max_line_length": 51, "num_lines": 36, "path": "/查找/二叉排序树查找.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class BiTree:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\n\nclass Solution:\n def searchBST(self, root, node):\n if not root or root.val == node.val:\n return root\n if root.val > node.val:\n return self.searchBST(root.left, node)\n else:\n return self.searchBST(root.right, node)\n\n\nroot = BiTree(45)\na = BiTree(12)\nb = BiTree(53)\nroot.left = a\nroot.right = b\na.left = BiTree(3)\na.right = BiTree(37)\na.right.left = BiTree(24)\nb.right = BiTree(100)\nb.right.left = BiTree(61)\nb.right.left.right = BiTree(90)\nb.right.left.right.left = BiTree(78)\nnode = BiTree(70)\n\nres = Solution().searchBST(root, node)\nif res:\n print(res.val)\nelse:\n print(res)\n" }, { "alpha_fraction": 0.4112359583377838, "alphanum_fraction": 0.43820226192474365, "avg_line_length": 25, "blob_id": "4e9d899d268e7bfe92292ad685ccd185b7ad45ee", "content_id": "87f4941d42cf40af6476d8b0d9eb42ba3d68b494", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 445, "license_type": "no_license", "max_line_length": 71, "num_lines": 17, "path": "/面试与笔试/笔试/hulu/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(input().strip())\nnum = []\nfor i in range(n):\n num.append(list(map(int, sys.stdin.readline().strip().split(' '))))\n\ndp = [[0]*n for i in range(n)]\n\nfor i in range(n):\n for j in range(n):\n if i == 0:\n dp[i][j] = dp[i][j-1] + (num[i][j] == 1)\n if j == 0:\n dp[i][j] = dp[i-1][j] + (num[i][j] == 1)\n 
dp[i][j] = min(dp[i-1][j], dp[i][j-1]) + (num[i][j] == 1)\n\nprint(dp[n-1][n-1])\n\n\n\n" }, { "alpha_fraction": 0.522580623626709, "alphanum_fraction": 0.550537645816803, "avg_line_length": 21.095237731933594, "blob_id": "a99d4c1172afd8c4277cfd6c1a92b613fdcbde88", "content_id": "d277c143297851c00756c6f1ee561349412cba23", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 465, "license_type": "no_license", "max_line_length": 62, "num_lines": 21, "path": "/面试与笔试/笔试/dxm/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nline = list(map(int, sys.stdin.readline().strip().split(\" \")))\nN = line[0]\nW = line[1]\n\nw = list(map(int, sys.stdin.readline().strip().split(\" \")))\nt = list(map(int, sys.stdin.readline().strip().split(\" \")))\n\nnums = list(zip(w,t))\nnums.sort(key=lambda x:x[0])\n\ni = 0\nres = 0\nfor i in range(len(nums)):\n j = len(nums) - 1\n while j >= 0 and nums[j][1] + nums[i][1] <= W:\n j -= 1\n res += max(nums[i][1], nums[j][1])\n res = 4\n\nprint(res)\n\n" }, { "alpha_fraction": 0.36565837264060974, "alphanum_fraction": 0.37811386585235596, "avg_line_length": 18.016948699951172, "blob_id": "92e6d36508a4ba3fc508a56bac1a50de36481338", "content_id": "725db41a88375a69c9f86988207d1ba850716e80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1124, "license_type": "no_license", "max_line_length": 50, "num_lines": 59, "path": "/面试与笔试/笔试/ks/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "n = int(input().strip())\nm = int(input().strip())\ns = []\nfor i in range(m):\n s.append(input().strip())\n\ni = 0\nres = []\np = []\nv = []\nflag = True\nwhile i < len(s):\n if flag and not s[i].startswith(\"P\"):\n res.append(s[i])\n else:\n flag = False\n if s[i].startswith(\"P\"):\n p.append(s[i])\n else:\n v.append(s[i])\n i += 1\n\n# print(p,v)\n\nif not len(p):\n print(res)\nelse:\n i = 0\n j = 0\n aa = 0\n while i < len(p) and j < len(v):\n res.append(p[i])\n count = n - 1\n k = j\n while count > 0 and j < len(v):\n res.append(v[j])\n count -= 1\n j += 1\n else:\n if j != len(v):\n j -= 1\n i += 1\n j += 1\n aa = count\n else:\n if j >= len(v) and aa == 0 and i < len(p):\n res.append(p[i])\n elif i >= len(p):\n res += v[j:]\n # if aa == 0:\n # if i >= len(p):\n # res += v[j:]\n # else:\n # res.append(p[i])\n\n\nprint(len(res))\nfor i in range(len(res)):\n print(res[i])\n\n\n" }, { "alpha_fraction": 0.4938775599002838, "alphanum_fraction": 0.4979591965675354, "avg_line_length": 30.913043975830078, "blob_id": "53fb8e3022537c1a46c433e8da96709d06a3eab3", "content_id": "23bbdaa376a3c8930443cb1a56cff8e402374e39", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 735, "license_type": "no_license", "max_line_length": 51, "num_lines": 23, "path": "/面试与笔试/笔试/ks/44.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nclass Solution:\n def solution(self, equation, x='x'):\n _equation = equation.replace(\"=\", \"-(\")+\")\"\n _equation = _equation.replace(\"x\",\"*x\")\n _equation = _equation.replace(\"+*x\",\"+x\")\n _equation = _equation.replace(\"-*x\",\"-x\")\n _equation = _equation.replace(\"(*x\",\"(x\")\n _equation = _equation.strip(\"*\")\n try:\n result = eval(_equation, {x:1j})\n res = - result.real / result.imag\n if int(res) == res:\n return int(res)\n else:\n return -1\n except:\n return -1\n\nline = 
str(sys.stdin.readline().strip())\nline = line.replace(\"*\", \"\")\nline = line.lower()\nprint(Solution().solution(line))\n\n" }, { "alpha_fraction": 0.4996066093444824, "alphanum_fraction": 0.5192761421203613, "avg_line_length": 36.411766052246094, "blob_id": "d48bf63f53fb431131378d9fe1689b2999aea5ce", "content_id": "29436b2c71cc245a09ea5f307d60ce8ace7dc1f5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1489, "license_type": "no_license", "max_line_length": 83, "num_lines": 34, "path": "/offer/29.顺时针打印矩阵.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n# 存在的问题,输入的是否是n*n的矩阵\nclass Solution:\n # matrix类型为二维列表,需要返回列表\n # 两个函数,一个用来循环,另一个用来实现打印一圈\n def printMatrix(self, matrix):\n # 通过画图可知,每一圈的开始都是(i,i),循环的继续的条件为cols > i * 2 and rows > i * 2\n if not matrix or len(matrix) == 0 or len(matrix[0]) == 0:\n return []\n res = []\n i = 0\n while i * 2 < len(matrix) and i * 2 < len(matrix[0]):\n res += self.printMatrixInCircle(matrix, len(matrix), len(matrix[0]), i)\n i += 1\n return res\n\n def printMatrixInCircle(self, matrix, rows, cols, start):\n res = []\n # 从左到右打印一行\n for i in range(start, cols-start):\n res.append(matrix[start][i])\n # 从上到下打印一列\n if start < rows - start - 1:\n for i in range(start + 1, rows - start):\n res.append(matrix[i][cols - start -1])\n # 从右到左打印一行\n if start < cols-start - 1 and start < rows - start - 1:\n for i in range(cols-start-2, start-1, -1):\n res.append(matrix[rows - start - 1][i])\n # 从下到上打印一列\n if start < cols - start - 1 and start < rows - start - 2:\n for i in range(rows - start - 2, start, -1):\n res.append(matrix[i][start])\n return res" }, { "alpha_fraction": 0.4949715733528137, "alphanum_fraction": 0.5268911123275757, "avg_line_length": 28.320512771606445, "blob_id": "1ee6d60c2599d59ddb4c40d711ec976fad8967b8", "content_id": "f6ae001b1bfd5d2b08b3690d0bccf2468be06403", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2287, "license_type": "no_license", "max_line_length": 78, "num_lines": 78, "path": "/面试与笔试/笔试/搜狗/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n# import numpy as np\nimport random\ndef voc(x,y):\n voc_set = set()\n for i in x:\n voc_set = voc_set | set(i)\n voc_list = list(voc_set)\n # print(voc_list)\n a = np.zeros((len(x), len(voc_list)))\n for index, i in enumerate(x):\n for j in i:\n a[index][voc_list.index(j)] += 1\n # print(a)\n X = np.array(x)\n # print(X)\n Y = np.array(y)\n P0 = a[Y == 0]\n # print(a[Y==0])\n # print(np.sum(P0, axis=0))\n P0Sum = np.sum(P0,axis=0)/np.sum(P0)\n # print(P0Sum)\n P0class = len(Y[Y==0])/float(len(Y))\n P1 = a[Y == 1]\n P1Sum = np.sum(P1,axis=0)/np.sum(P0)\n P1class = len(Y[Y==1])/float(len(Y))\n\n return voc_list, P0class, P1class,P0Sum, P1Sum\n\ndef prediction(word_list, voc_list, p0class, p1class, p0sum, p1sum):\n voc_vec = np.zeros(len(voc_list))\n for i in word_list:\n if i in voc_list:\n voc_vec[voc_list.index(i)] =1\n word_list_p0sum = np.log(p0class)\n word_list_p1sum = np.log(p1class)\n\n for index, i in enumerate(voc_vec):\n if i != 0:\n if p0sum[index] != 0:\n word_list_p0sum += np.log(p0sum[index])\n if p1sum[index] != 0:\n word_list_p1sum += np.log(p1sum[index])\n # print(word_list_p0sum)\n # print(word_list_p1sum)\n if word_list_p0sum <= word_list_p1sum:\n return 0\n else:\n return 1\n\n\nif __name__=='__main__':\n nums = sys.stdin.readline().strip().split(\" \")\n nums = list(map(int, [x 
for x in nums if x != '']))\n M = nums[0]\n N = nums[1]\n d = nums[2]\n x = []\n y = []\n test = []\n for i in range(M):\n line = sys.stdin.readline().strip().split(\" \")\n line = list(map(int, [x for x in line if x != '']))\n x.append(line[1:])\n y.append(line[0])\n for i in range(N):\n line = sys.stdin.readline().strip().split(\" \")\n line = list(map(int, [x for x in line[1:] if x != '']))\n test.append(line)\n # print(x)\n # print(y)\n # print(test)\n voc_list, p0class, p1class, p0sum, p1sum = voc(x,y)\n # print(voc_list, p0class, p1class, p0sum, p1sum)\n for i in range(N):\n # print(prediction(test[i], voc_list, p0class, p1class, p0sum, p1sum))\n k = (random.randrange(0,10))\n print(1 if k > 5 else 0)\n" }, { "alpha_fraction": 0.3986518979072571, "alphanum_fraction": 0.4309099614620209, "avg_line_length": 29.514705657958984, "blob_id": "87cc0eccd244dc47ae773e486744a5297437255c", "content_id": "58d9084ffbcf01ec8db04fcee4992c6deac9e14c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2285, "license_type": "no_license", "max_line_length": 66, "num_lines": 68, "path": "/offer/47.礼物的最大价值.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n在一个m*n的棋盘上,每一格都放有礼物,从棋盘的左上角开始,每次向左或者向下移动一格,直到右下角,求礼物的最大价值\n\"\"\"\n\"\"\"\n思路:动态规划,dp[i][j] = max(dp[i-1][j], dp[i][j-1]) + matrix[i][j]\n不需要使用二维数组,一维就够了:dp[i] = max(dp[i-1], dp[i]) + matrix[i][j]\n\"\"\"\n\nclass Solution:\n def max_gift_value_1(self, matrix):\n # 一维数组存储\n if not matrix:\n return 0\n dp = [0] * len(matrix[0])\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if i == 0 and j == 0:\n dp[j] = matrix[i][j]\n elif i == 0:\n dp[j] = dp[j-1] + matrix[i][j]\n elif j == 0:\n dp[j] = dp[j] + matrix[i][j]\n else:\n dp[j] = max(dp[j-1], dp[j]) + matrix[i][j]\n return dp[j]\n\n def max_gift_value_2(self, matrix):\n # 一维数组存储,更加简洁的代码\n if not matrix:\n return 0\n dp = [0] * len(matrix[0])\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n left = 0\n up = 0\n if i > 0:\n up = dp[j]\n if j > 0:\n left = dp[j-1]\n dp[j] = max(left, up) + matrix[i][j]\n return dp[j]\n\n def max_gift_value_3(self, matrix):\n # 二维数组存储\n if not matrix:\n return 0\n dp = [[0]*len(matrix[0])] * len(matrix)\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n left = 0\n up = 0\n if i > 0:\n up = dp[i-1][j]\n if j > 0:\n left = dp[i][j-1]\n dp[i][j] = max(left, up) + matrix[i][j]\n return dp[i][j]\n\nmatrix = [[1, 10, 3, 8],[12, 2, 9, 6],[5, 7, 4, 11],[3, 7, 16, 5]]\nprint(Solution().max_gift_value_3(matrix))\nmatrix = [[2,3,4]]\nprint(Solution().max_gift_value_1(matrix))\nmatrix = [[2],[3],[4]]\nprint(Solution().max_gift_value_1(matrix))\nmatrix = [[1]]\nprint(Solution().max_gift_value_1(matrix))\nmatrix = None\nprint(Solution().max_gift_value_1(matrix))\n\n\n" }, { "alpha_fraction": 0.5165315270423889, "alphanum_fraction": 0.5670617818832397, "avg_line_length": 27.64285659790039, "blob_id": "e12ce2730c0e53826de4f5b939397d1ebe43645b", "content_id": "1986fb18df2d98d93804c84db5c5ef1d4d8f4cc6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1763, "license_type": "no_license", "max_line_length": 92, "num_lines": 56, "path": "/offer/49.丑数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n只包含因子2/3/5的数为丑数,求从小到大第1500个丑数是什么\n\"\"\"\n\"\"\"\n思路一:逐一判断是否是丑数,直到1500个\n思路二:生成1500个丑数,后面的每一个丑数肯定是当前丑数集合中的某一个数乘以2或3或5得到的\n\"\"\"\nclass Solution:\n def isUgly(self, num):\n 
while num % 2 == 0:\n num = num / 2\n while num % 3 == 0:\n num = num /3\n while num % 5 == 0:\n num /= 5\n return num == 1\n\n def GetUglyNumber_Solution(self, index):\n count = 0\n num = 0\n while count < index:\n num += 1\n if self.isUgly(num):\n count += 1\n return num\n\n\nclass Solution2:\n def GetUglyNumber_Solution(self, index):\n if index < 1:\n return 0\n count = 1\n ugly = [1]*index\n lastugly2 = 0\n lastugly3 = 0\n lastugly5 = 0\n while count < index:\n ugly[count] = min(ugly[lastugly2] * 2, ugly[lastugly3] * 3, ugly[lastugly5] * 5)\n while ugly[lastugly2] * 2 <= ugly[count]:\n lastugly2 += 1\n while ugly[lastugly3] * 3 <= ugly[count]:\n lastugly3 += 1\n while ugly[lastugly5] * 5 <= ugly[count]:\n lastugly5 += 1\n count += 1\n return ugly[count-1]\n\n# print(Solution2().GetUglyNumber_Solution(1))\nprint(Solution2().GetUglyNumber_Solution(2))\nprint(Solution2().GetUglyNumber_Solution(3))\nprint(Solution2().GetUglyNumber_Solution(4))\nprint(Solution2().GetUglyNumber_Solution(5))\nprint(Solution2().GetUglyNumber_Solution(6))\nprint(Solution2().GetUglyNumber_Solution(7))\nprint(Solution2().GetUglyNumber_Solution(8))\nprint(Solution2().GetUglyNumber_Solution(9))" }, { "alpha_fraction": 0.4275362193584442, "alphanum_fraction": 0.4316770136356354, "avg_line_length": 23.174999237060547, "blob_id": "edb68a5c1e93a222713efe4d2f7b76a9b8399061", "content_id": "87983c9388b44cae38ead55166e4aecce6da9987", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 966, "license_type": "no_license", "max_line_length": 74, "num_lines": 40, "path": "/面试与笔试/笔试/tt/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(input().strip())\nmatrix = []\nfor i in range(n):\n matrix.append(list(map(int, sys.stdin.readline().strip().split(' '))))\n\nd = {}\nfor i in range(n):\n for j in range(i+1, n):\n if matrix[i][j] >= 3:\n if i not in d.keys():\n d[i] = [j]\n else:\n d[i].append(j)\n\nif len(d.keys()) == 1:\n print(n - len(d[list(d.keys())[0]]))\nelse:\n # new_keys = d.keys()\n remove = []\n for key in d.keys():\n if key not in remove:\n val = d[key]\n add = []\n for tmp in val:\n if tmp in d.keys():\n add += d[tmp]\n remove.append(tmp)\n add = list(set(add))\n d[key] += add\n for tmp in remove:\n d.pop(tmp)\n count = len(d.keys())\n all = []\n for tmp in d.keys():\n all.append(tmp)\n all += d[tmp]\n all = list(set(all))\n count += n - len(all)\n print(count)" }, { "alpha_fraction": 0.5053763389587402, "alphanum_fraction": 0.5555555820465088, "avg_line_length": 18.85714340209961, "blob_id": "e79a63f1a0ff04c92cfaacc8f81b44b4f35a0d73", "content_id": "0fb22118a3c9728a103e77e21c8991b34eac9317", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 279, "license_type": "no_license", "max_line_length": 46, "num_lines": 14, "path": "/面试与笔试/笔试/nvidia/33.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nline = sys.stdin.readline().strip().split('.')\nline = line[::-1]\nres = 0\nfor j, i in enumerate(line):\n res += 256 ** j * int(i)\nprint(res)\n\nline = int(input())\nres = []\nfor i in range(3, -1, -1):\n res.append(str(line // (256 ** i) % 256))\nprint('.'.join(res))\n\n" }, { "alpha_fraction": 0.4806534945964813, "alphanum_fraction": 0.5236457586288452, "avg_line_length": 28.846153259277344, "blob_id": "bca833098bf3afe921ccdf86984bb128d09b554a", "content_id": "8fd23e54a4beeea5622b395250c30abf937ac147", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1479, "license_type": "no_license", "max_line_length": 75, "num_lines": 39, "path": "/面试与笔试/求平方根.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# 求一个数的平凡根,可以使用牛顿迭代法和二分查找法\n\nclass Solution:\n # 二分搜索法\n def binary_search(self, num):\n low = 0\n high = num\n while high - low > 1e-7:\n mid = (low + high) / 2\n if abs(mid * mid - num) <= 1e-7:\n return mid\n elif mid * mid > num:\n high = mid\n else:\n low = mid\n return mid\n\n # 牛顿迭代法\n # f(x) = x^2 - n\n # 设r是f(x) = 0的根,选取x0作为r初始近似值,过点(x0,f(x0))做曲线y = f(x)的切线L,L的方程为\n # y = f(x0)+f'(x0)(x-x0),求出L与x轴交点的横坐标 x1 = x0-f(x0)/f'(x0),称x1为r的一次近似值。\n # 过点(x1,f(x1))做曲线y = f(x)的切线,并求该切线与x轴交点的横坐标 x2 = x1-f(x1)/f'(x1),称\n # x2为r的二次近似值。重复以上过程,得r的近似值序列,其中x(n+1)=x(n)-f(x(n))/f'(x(n)),称为r的\n # n+1次近似值,上式称为牛顿迭代公式。\n # 则 x2 = x1 - (x1^2-n)/(2x1) = x1/2 + n/(2x1)\n def newton(self, num):\n k = 1\n while abs(k * k - num) > 1e-7:\n k = (k + num/k) / 2\n return k\n\nprint(Solution().binary_search(2))\nprint(Solution().newton(2))\nprint(Solution().binary_search(4))\nprint(Solution().newton(4))\nprint(Solution().binary_search(8))\nprint(Solution().newton(8))\nprint(Solution().binary_search(9))\nprint(Solution().newton(9))" }, { "alpha_fraction": 0.5416666865348816, "alphanum_fraction": 0.5763888955116272, "avg_line_length": 19.571428298950195, "blob_id": "0a255209b8eedf9b33f98761127acac4df9de215", "content_id": "375b02fa5bb4c6b65650adf1783d29602496bdfc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 144, "license_type": "no_license", "max_line_length": 62, "num_lines": 7, "path": "/面试与笔试/笔试/pingan/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nline = list(map(int, sys.stdin.readline().strip().split(\" \")))\na = line[0]\nb = line[1]\nres = pow(a, 1/b)\nprint(\"%.6f\" %pow(a, 1/b))\n" }, { "alpha_fraction": 0.3727121353149414, "alphanum_fraction": 0.41763725876808167, "avg_line_length": 23, "blob_id": "75e467f02ba4d43e6c2d99c648a28f2f7452aeee", "content_id": "1c46368dfb18fb87ce062da8be3246252a37f6a8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 601, "license_type": "no_license", "max_line_length": 50, "num_lines": 25, "path": "/面试与笔试/和为0的连续最长子串.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "#coding=utf-8\nimport sys\n\ndef max_seq(nums):\n dp = [float(\"-inf\")] * len(nums)\n for i in range(len(nums)):\n if i == 0:\n dp[i] = nums[i]\n else:\n dp[i] = dp[i-1] + nums[i]\n max_len = 0\n dic = {}\n for j in range(len(dp)):\n if dp[j] in dic.keys():\n max_len = max(max_len, j - dic[dp[j]])\n else:\n if dp[j] == 0:\n dic[dp[j]] = -1\n else:\n dic[dp[j]] = j\n return max_len\n\nprint(max_seq([1,-1,1,-1,0,5]))\nprint(max_seq([1,-1,1,-1,1,-1,1,-1]))\nprint(max_seq([1,1,-1,1,1,-1,-1]))\n\n" }, { "alpha_fraction": 0.512110710144043, "alphanum_fraction": 0.5519031286239624, "avg_line_length": 29.473684310913086, "blob_id": "0b0289cc792b9fee2c79f14b6b1b267de5c9169b", "content_id": "742b82ef7dcc60b4e583b5543d2259b1f7ce3873", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 578, "license_type": "no_license", "max_line_length": 66, "num_lines": 19, "path": "/offer/42.连续子数组的最大和.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n'''\ndp:\ndp[i]=nums[i] if dp[i-1]<0 or i = 0\n nums[i] + 
dp[i-1] if i!= 0 and dp[i-1] > 0\n'''\nclass Solution:\n def FindGreatestSumOfSubArray(self, array):\n currmax = 0\n resmax = float(\"-inf\")\n for i in range(len(array)):\n currmax = max(array[i], currmax + array[i])\n # print(currmax)\n resmax = max(resmax, currmax)\n # print(resmax)\n return resmax\n\nprint(Solution().FindGreatestSumOfSubArray([6,-3,-2,7,-15,1,2,2]))\nprint(Solution().FindGreatestSumOfSubArray([-2,-8,-1,-5,-9]))" }, { "alpha_fraction": 0.4367816150188446, "alphanum_fraction": 0.4482758641242981, "avg_line_length": 23.11111068725586, "blob_id": "0dc183ccebe843026d221be9a6807649fc1b6584", "content_id": "9eb04e2061e14565f632f997bca50515d655545b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 870, "license_type": "no_license", "max_line_length": 52, "num_lines": 36, "path": "/面试与笔试/笔试/pdd/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\ndef func(tasks, graph_inputs):\n import queue\n n = len(tasks)\n graph = {}\n count = [0]*(n+1)\n to_visit = queue.PriorityQueue()\n for g in graph_inputs:\n a, b=g\n if a not in graph:\n graph[a] = []\n graph[a].append(b)\n count[b] += 1\n for i in range(1, n+1):\n if count[i] == 0:\n to_visit.put((tasks[i-1], i, i))\n res = []\n # print(count)\n # print(graph)\n while not to_visit.empty():\n temp = to_visit.get()\n _,_,cur = temp\n # print(temp, cur)\n res.append(cur)\n if cur in graph:\n # print(cur)\n for b in graph[cur]:\n count[b] -= 1\n # print('count', b)\n if count[b] == 0:\n to_visit.put((tasks[b-1], b, b))\n return res\n\n\n\nres = func(tasks, graph_inputs)\nprint(res)\n\n" }, { "alpha_fraction": 0.46105262637138367, "alphanum_fraction": 0.4736842215061188, "avg_line_length": 21.66666603088379, "blob_id": "590752a212991abb6ee600ecf220e9df30d6807b", "content_id": "1d935103e7128a4887f16207e0458ad3b32c4e21", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 525, "license_type": "no_license", "max_line_length": 48, "num_lines": 21, "path": "/offer/50.第一个只出现一次的字符.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\n\"\"\"\n在字符串中找出第一个只出现一次的字符,输入\"abaccdeff\",则输出1\n\"\"\"\n\nclass Solution:\n def FirstNotRepeatingChar(self, s):\n # write code here\n dic = dict()\n for i in range(len(s)):\n if s[i] in dic.keys():\n dic[s[i]] += 1\n else:\n dic[s[i]] = 1\n for i in range(len(s)):\n if dic[s[i]] == 1:\n return i\n return -1\n\nprint(Solution().firstNotRepeating(\"abaccdeff\"))" }, { "alpha_fraction": 0.46023234724998474, "alphanum_fraction": 0.491510272026062, "avg_line_length": 23.844444274902344, "blob_id": "5d49aa7ea824ad0f5e7ce1bf98906f6ecdd531ef", "content_id": "b62f05fa0781574919ac3b6b900ce4cc580baf2e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1247, "license_type": "no_license", "max_line_length": 55, "num_lines": 45, "path": "/offer/25.合并两个排序的链表.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n输入两个单调递增的链表,输出两个链表合成后的链表,当然我们需要合成后的链表满足单调不减规则。\n\"\"\"\nclass ListNode:\n def __init__(self, x):\n self.val = x\n self.next = None\n\n\nclass Solution:\n # 返回合并后列表\n def Merge1(self, pHead1, pHead2):\n # 法一:递归\n if not pHead1:\n return pHead2\n elif not pHead2:\n return pHead1\n if pHead1.val <= pHead2.val:\n head = pHead1\n head.next = self.Merge(pHead1.next, pHead2)\n else:\n head = pHead2\n head.next = self.Merge(pHead1, 
pHead2.next)\n return head\n\n def Merge(self, pHead1, pHead2):\n # 循环\n if not pHead1:\n return pHead2\n elif not pHead2:\n return pHead1\n head = p = ListNode(0)\n while pHead1 and pHead2:\n if pHead1.val <= pHead2.val:\n p.next = pHead1\n p = p.next\n pHead1 = pHead1.next\n else:\n p.next = pHead2\n p = p.next\n pHead2 = pHead2.next\n p.next = pHead1 or pHead2\n return head.next\n\n" }, { "alpha_fraction": 0.4073426425457001, "alphanum_fraction": 0.48076921701431274, "avg_line_length": 37.20000076293945, "blob_id": "ab991e32a40089212ea950e8aad0f5e176918f39", "content_id": "63db35c40e29ccd15d5668002e16cc6c9ff4a26d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 702, "license_type": "no_license", "max_line_length": 124, "num_lines": 15, "path": "/面试与笔试/背包问题-完全背包问题.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# https://blog.csdn.net/qq_22526061/article/details/83504116\n\"\"\"\n还是原来的问题,上次是每个物体只能用1次,现在改成无限次,转移方程只需要修改一点即可dp[i][j] = max(dp[ i - 1, j ], dp[ i, j - w[ i ] ] + v [ i ] ) 只需要把后面的i-1 -> i 即可\n\"\"\"\ndef pack2(w, v, C): #每个东西能选择多次 完全背包问题\n dp = [[0 for _ in range(C+1)] for _ in range(len(w)+1)]\n for i in range(1, len(w)+1):\n for j in range(1, C+1):\n if j < w[i-1]:\n dp[i][j] = dp[i-1][j]\n else:\n dp[i][j] = max(dp[i-1][j], dp[i][j-w[i-1]]+ v[i-1])\n for i in dp:\n print(i)\npack2([2,3,4,5], [3,4,5,6], 8)" }, { "alpha_fraction": 0.42457419633865356, "alphanum_fraction": 0.5103406310081482, "avg_line_length": 31.235294342041016, "blob_id": "f3479539ee71b254d7f4dd4cf4d7f4773ff8bf0e", "content_id": "2b99d479aff0971e9008df69a5a9c0f3fbc42d94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2014, "license_type": "no_license", "max_line_length": 100, "num_lines": 51, "path": "/offer/59.滑动窗口的最大值.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\n\"\"\"\n题目描述\n给定一个数组和滑动窗口的大小,找出所有滑动窗口里数值的最大值。例如,如果输入数组{2,3,4,2,6,2,5,1}及滑动窗口的大小3,那么一共存在6个滑动窗\n口,他们的最大值分别为{4,4,6,6,6,5}; 针对数组{2,3,4,2,6,2,5,1}的滑动窗口有以下6个: {[2,3,4],2,6,2,5,1}, {2,[3,4,2],6,2,5,1},\n {2,3,[4,2,6],2,5,1}, {2,3,4,[2,6,2],5,1}, {2,3,4,2,[6,2,5],1}, {2,3,4,2,6,[2,5,1]}。\n\"\"\"\n\n'''\n思路:使用一个额外的双向开口的队列来保存最大值\n队列存最大值:如果当前要存入的大于队尾,则队尾弹出,直到不小于当前,然后当前入列;如果小于,则直接加入队尾\n'''\nclass Solution:\n def maxInWindows(self, num, size):\n # write code here\n if not num or len(num) < size or size < 1:\n return []\n max_queue = []\n res = []\n for i in range(len(num)):\n # if not max_queue:\n # max_queue.append(num[i])\n # elif num[i] > max_queue[-1]:\n # while max_queue and num[i] > max_queue[-1]:\n # max_queue.pop()\n # max_queue.append(num[i])\n # else:\n # max_queue.append(num[i])\n\n # 上面的可以直接用下面三行代替\n while max_queue and num[i] > max_queue[-1]:\n max_queue.pop()\n max_queue.append(num[i])\n\n if i < size - 1:\n continue\n else:\n res.append(max_queue[0])\n if max_queue[0] == num[i - size + 1]:\n max_queue = max_queue[1:]\n return res\n\n\n\n# print(Solution().maxInWindows([6,6,6,6,6,7],2))\n# print(Solution().maxInWindows([10,14,12,11],0))\n# print(Solution().maxInWindows([10,14,12,11],1))\nprint(Solution().maxInWindows([16,14,12,10,8,6,4],5))\nprint(Solution().maxInWindows([1,3,5,7,9,11,13,15],4))\nprint(Solution().maxInWindows([2,3,4,2,6,2,5,1], 3))\n" }, { "alpha_fraction": 0.5970250368118286, "alphanum_fraction": 0.6091954112052917, "avg_line_length": 24.96491241455078, "blob_id": "90574a7419bfaadc9d9616d6b9487b8a856105e1", "content_id": 
"2a388f1cf6d06286c6e1bbdba553419890b75787", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1773, "license_type": "no_license", "max_line_length": 93, "num_lines": 57, "path": "/offer/55.1平衡二叉树.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n题目描述\n输入一棵二叉树,判断该二叉树是否是平衡二叉树。任意节点的左子树和右子树的深度差不超过1\n\"\"\"\n\n# -*- coding:utf-8 -*-\nclass TreeNode:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\n\nclass Solution:\n\n # 法1:利用55题,求二叉树的深度的方法,遍历树,对每个节点的左右子树求深度,看深度差是否大于1\n # 问题:会右很多重复的计算\n\n def TreeDepth(self, pRoot):\n if not pRoot:\n return 0\n left = self.TreeDepth(pRoot.left)\n right = self.TreeDepth(pRoot.right)\n return max(left, right) + 1\n\n def IsBalanced_Solution1(self, pRoot):\n # write code here\n if not pRoot:\n return True\n left = self.TreeDepth(pRoot.left)\n right = self.TreeDepth(pRoot.right)\n if abs(left - right) > 1:\n return False\n\n return self.IsBalanced_Solution(pRoot.left) and self.IsBalanced_Solution(pRoot.right)\n\n # 法2:每个节点只遍历一次,使用后续遍历的方式,每次遍历到这个节点的时候,其子节点都已经遍历完了\n def IsBalanced(self, pRoot):\n if not pRoot:\n return True, 0\n left, count_left = self.IsBalanced(pRoot.left)\n right, count_right = self.IsBalanced(pRoot.right)\n if (left and right) and abs(count_left - count_right) <= 1:\n return True, max(count_left, count_right) + 1\n\n else:\n return False, -1\n\n def IsBalanced_Solution(self, pRoot):\n res, n = self.IsBalanced(pRoot)\n return res\n\n\nroot = TreeNode(1)\nroot.left = TreeNode(2)\nroot.right = TreeNode(3)\nprint(Solution().IsBalanced_Solution(root))" }, { "alpha_fraction": 0.540669858455658, "alphanum_fraction": 0.5526315569877625, "avg_line_length": 18.904762268066406, "blob_id": "53db210bec5baecd03ba4cde73e50744c1e715c5", "content_id": "73c2e32999f7dd94196a596d33925e6eab7fa7d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 418, "license_type": "no_license", "max_line_length": 64, "num_lines": 21, "path": "/面试与笔试/笔试/pdd.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nline = list(sys.stdin.readline().strip().split(\";\"))\nnum = list(map(int, line[0].split(\",\")))\nk = int(line[1])\n\nodd = []\neven = []\n\nfor x in num:\n if x % 2 == 0:\n even.append(x)\n else:\n odd.append(x)\n\neven.sort(reverse=True)\nodd.sort(reverse=True)\n\nif len(even) >= k:\n print(\",\".join([str(x) for x in even[:k]]))\nelse:\n print(\",\".join([str(x) for x in even + odd[0:k-len(even)]]))\n" }, { "alpha_fraction": 0.41916167736053467, "alphanum_fraction": 0.46107783913612366, "avg_line_length": 28.41176414489746, "blob_id": "e94ed18aacd2aac245a63d2f7ca2f3931cd18246", "content_id": "8746d68fc71cb44e1fcab4f64b2d0f21ab57b234", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 501, "license_type": "no_license", "max_line_length": 66, "num_lines": 17, "path": "/面试与笔试/笔试/wy2/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nT = int(input().strip())\nfor i in range(T):\n line = list(map(int, sys.stdin.readline().strip().split(\" \")))\n tmp = []\n for j,x in enumerate(line):\n tmp.append((j,x))\n tmp.sort(key=lambda x:x[1], reverse=True)\n res = [0] * 3\n for j in range(len(tmp)):\n if tmp[j][1] % 2 == 0:\n div1 = div2 = tmp[j][1] / 2\n else:\n div1 = tmp[j][1] // 2\n div2 = tmp[j][1] // 2 + 1\n # tmp[-1][0] tmp\n print((sum(line) + 2)//3 )\n\n" }, { "alpha_fraction": 
0.4732142984867096, "alphanum_fraction": 0.488095223903656, "avg_line_length": 20.0625, "blob_id": "74653f48f11fb2b6b237168076cc24bb76e270b3", "content_id": "5c5d0cbf5ba1828ba40a92ae243f50c746bdf11a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 336, "license_type": "no_license", "max_line_length": 62, "num_lines": 16, "path": "/面试与笔试/笔试/pingan/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nn = int(input())\nnums = list(map(int, sys.stdin.readline().strip().split(\" \")))\n\ndef f(nums, n):\n avg = sum(nums) // n\n res = 0\n for i in range(len(nums)):\n if abs(nums[i] - avg) % 2 != 0:\n return -1\n if nums[i] > avg:\n res += (nums[i] - avg)//2\n return res\n\nprint(f(nums, n))" }, { "alpha_fraction": 0.5223641991615295, "alphanum_fraction": 0.5351437926292419, "avg_line_length": 17.969696044921875, "blob_id": "f92cd71579b48b47317efd1d72e9b8cfc117caad", "content_id": "c271b01044a31e1aa28349ee8ce45635aea00aef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 626, "license_type": "no_license", "max_line_length": 58, "num_lines": 33, "path": "/面试与笔试/笔试/pdd/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\na = sys.stdin.readline().strip().split()\nindex = 0\n\nfor i in range(1, len(a)):\n if int(a[i]) < int(a[index]):\n break\n index += 1\n\n# print(index)\n\nleft = int(a[index])\nindex += 2\nif index < len(a):\n right = int(a[index])\nelse:\n right = float(\"+inf\")\n\n# print(left,right)\n# print(100 < right)\n\nb = sys.stdin.readline().strip().split()\nmax_ = float(\"-inf\")\nfor i in range(len(b)):\n if int(b[i]) > right:\n break\n if left <= int(b[i]) <= right:\n max_ =max(max_, int(b[i]))\n\nif max_ == float(\"-inf\"):\n print(\"NO\")\nelse:\n print(' '.join(a[:index-1] + [str(max_)] + a[index:]))\n" }, { "alpha_fraction": 0.5637530088424683, "alphanum_fraction": 0.5733761191368103, "avg_line_length": 24.46938705444336, "blob_id": "d4337ee339e428599a91acbcefb2ea916d3255bd", "content_id": "800d336ce41da98e12a8c49e92e812acd2fa6ba8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1287, "license_type": "no_license", "max_line_length": 84, "num_lines": 49, "path": "/offer/34.二叉树中和为某一值的路径.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\nimport copy\nclass TreeNode:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\nclass Solution:\n # 返回二维列表,内部每个列表表示找到的路径\n def FindPath(self, root, expectNumber):\n # write code here\n if not root:\n return []\n res = []\n path = []\n pathNumbeer = 0\n self.FindPathRecursion(root, expectNumber, res, path, pathNumbeer)\n return res\n\n def FindPathRecursion(self, root, expectNumber, res, path, pathNumbeer):\n pathNumbeer += root.val\n path.append(root.val)\n if not root.left and not root.right:\n if expectNumber == pathNumbeer:\n res.append(copy.deepcopy(path))\n pathNumbeer -= root.val\n path.pop()\n return\n if root.left:\n self.FindPathRecursion(root.left, expectNumber, res, path, pathNumbeer)\n if root.right:\n self.FindPathRecursion(root.right, expectNumber, res, path, pathNumbeer)\n path.pop()\n\n\nroot = TreeNode(10)\nroot.left = TreeNode(5)\nroot.right = TreeNode(12)\nprint(Solution().FindPath(root, 22))\n\n# c = [1,2,3]\n# b = []\n# a = c\n# b.append(a)\n# c.pop()\n# b.append(a)\n# c.pop()\n# print(b)" }, { 
"alpha_fraction": 0.3794076144695282, "alphanum_fraction": 0.39633285999298096, "avg_line_length": 17.179487228393555, "blob_id": "1cfe5ad52792cc3f2ea018482a5e9a171ffccdf1", "content_id": "98e239f6784ef02f6c17a1f07da277e007d95703", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 709, "license_type": "no_license", "max_line_length": 48, "num_lines": 39, "path": "/面试与笔试/笔试/tx2/22.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <cstdio>\n#include <vector>\n#include <algorithm>\n\nusing namespace std;\n\nint main(){\n int t,k;\n cin >> t >> k;\n int len = 0;\n vector<int> start(t, 0);\n vector<int> end(t, 0);\n vector<int> dp(len+1, 0);\n for(int i = 0; i < t; i++){\n cin >> start[i];\n cin >> end[i];\n len = max(len, end[i]);\n }\n int m = 0;\n for(; m <= len; m++){\n if(m >= k){\n dp[m] = dp[m-1] + dp[m-k];\n }else{\n dp[m] = 1;\n }\n }\n\n for(int i = 0; i<t; i++){\n int out = 0;\n for(int j = start[i]; j <= end[i]; j++){\n out += dp[j];\n }\n cout << out <<endl;\n\n }\n return 0;\n\n}\n" }, { "alpha_fraction": 0.524640679359436, "alphanum_fraction": 0.5595482587814331, "avg_line_length": 29.46875, "blob_id": "7b5fcc693314c973b7ac6f90d87765826a02714f", "content_id": "b6e835d9c17420d9e7a47ee00d3a9e97aea713c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1134, "license_type": "no_license", "max_line_length": 56, "num_lines": 32, "path": "/offer/11.1快速排序.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# 快速排序的平均时间复杂度为O(nlogn)\n# 在最坏的情况下,有序的情况,快速排序可退化成冒泡排序,最坏时间复杂度为O(n^2),\n# 可以依照nums[low], nums[mid], nums[high]三者取中的法则来选取pivot\n# 需要一个栈空间来实现递归,空间复杂度O(logn)\nclass Solution:\n def Partition(self, nums, low, high):\n pivotkey = nums[low]\n while low < high:\n while low < high and nums[high] >= pivotkey:\n high -= 1\n nums[low] = nums[high]\n while low < high and nums[low] <= pivotkey:\n low += 1\n nums[high] = nums[low]\n nums[low] = pivotkey\n print(nums)\n return low\n\n def QuickSort(self, nums, low, high):\n if low < high:\n partition = self.Partition(nums, low, high)\n self.QuickSort(nums, low, partition-1)\n self.QuickSort(nums, partition+1, high)\n else:\n return\n\n# nums = [49, 38, 65, 97, 76, 13, 27, 49]\nnums = [1,2,3,1,3,4,2,4,5,6,3]\nlow = 0\nhigh = len(nums)-1\nSolution().QuickSort(nums, low, high)\nprint(nums)" }, { "alpha_fraction": 0.41025641560554504, "alphanum_fraction": 0.4285714328289032, "avg_line_length": 27.275861740112305, "blob_id": "e8ad96e14205a70416e3ea887e4772a937356206", "content_id": "80e5e9acdf9e4b38216b9094297971ff99c97ab3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 819, "license_type": "no_license", "max_line_length": 70, "num_lines": 29, "path": "/面试与笔试/笔试/360/qqqqqqq.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nclass Solution:\n def area(self, nums):\n M = len(nums)\n N = len(nums[0])\n res = 0\n for i in range(M):\n for j in range(N):\n res = res + nums[i][j] * 6\n if nums[i][j] > 1:\n res = res - (nums[i][j] - 1) * 2\n if j > 0:\n res = res - min(nums[i][j], nums[i][j - 1]) * 2\n if i > 0:\n res = res - min(nums[i][j], nums[i - 1][j]) * 2\n return res\n\n\nif __name__ == '__main__':\n\n line1 = list(map(int, sys.stdin.readline().strip().split(\" \")))\n N = line1[0]\n input_matrix = []\n for i in range(N):\n line = list(map(int, 
sys.stdin.readline().strip().split(\" \")))\n input_matrix.append(line)\n\n print(Solution().area(input_matrix))" }, { "alpha_fraction": 0.4347614347934723, "alphanum_fraction": 0.45861732959747314, "avg_line_length": 23.129411697387695, "blob_id": "02a7ff2cfa567841d627f936f6e80cc13d0e67b3", "content_id": "46391eda109643d0af23b56eb6a2d2981c047e93", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2388, "license_type": "no_license", "max_line_length": 58, "num_lines": 85, "path": "/面试与笔试/最长回文字串leetcode5.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n求一个字符串中的最长回文子串\n\"\"\"\n\n# 法一:暴力法,求出所有的子串,然后在看每个子串是否是回文 O(n^3)\n\n# 法二:中心法,以每一个字母为中心,向两边扩展,判断是否是回文,要分成奇数和偶数两种情况 O(n^2)\n\n# 法三:动态规划,设状态dp[j][i]表示索引j到索引i的子串是否是回文串,转移方程为:\n'''\n true j == i\ndp[j][i] = s[i] == s[j] i-j == 1\n s[i] == s[j] and dp[j+1][i-1] i-j > 1\n \n则dp[j][i]为true时表示索引j到索引i形成的子串为回文子串,且子串起点索引为j,长度为i - j + 1。\n算法时间复杂度为O(N ^ 2)。\n'''\n# https://www.jianshu.com/p/c82cada7e5b0\n\n# 法1\ndef longestPalindrome1(s):\n # s = list(s)\n max_len = 1\n start = 0\n for i in range(len(s)):\n for j in range(i+1, len(s)):\n if list(s[i:j+1]) == list(reversed(s[i:j+1])):\n if j - i + 1 > max_len:\n max_len = j - i + 1\n start = i\n return max_len, s[start: start+max_len]\n\n# 法2\ndef longestPalindrome2(s):\n # 长度为\n max_len = 1\n start = 0\n for i in range(len(s)):\n j = i - 1\n k = i + 1\n while j >= 0 and k < len(s) and s[j] == s[k]:\n if k - j + 1 > max_len:\n start = j\n max_len = k - j + 1\n\n j -= 1\n k += 1\n\n for i in range(len(s)):\n j = i\n k = i + 1\n while j >= 0 and k < len(s) and s[j] == s[k]:\n if k - j + 1 > max_len:\n start = j\n max_len = k - j + 1\n j -= 1\n k += 1\n\n return max_len, s[start:start+max_len]\n\n# 法3\ndef longestPalindrome(s):\n dp = [[0]*len(s) for i in range(len(s))]\n max_len = 1\n start = 0\n for i in range(len(s)):\n for j in range(i+1):\n if i - j < 2:\n dp[j][i] = (s[i] == s[j])\n else:\n dp[j][i] = (s[i] == s[j] and dp[j+1][i-1])\n\n if dp[j][i] and i-j+1 > max_len:\n max_len = i-j+1\n start = j\n return max_len, s[start:start+max_len]\n\n\n\nprint(longestPalindrome(\"a\"))\nprint(longestPalindrome(\"aba\"))\nprint(longestPalindrome(\"abccba\"))\nprint(longestPalindrome(\"ab\"))\nprint(longestPalindrome(\"abb\"))\nprint(longestPalindrome(\"abca\"))\n\n\n\n" }, { "alpha_fraction": 0.420560747385025, "alphanum_fraction": 0.4322429895401001, "avg_line_length": 21.473684310913086, "blob_id": "bd5e61aea1e413cccdb068079575d801d8946077", "content_id": "6b3cc88406dd23a5cb4d2bf900aa3ae5329b43de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 458, "license_type": "no_license", "max_line_length": 42, "num_lines": 19, "path": "/面试与笔试/移除字符串中字母序最小的k个元素.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "#coding=utf-8\n# 移除字符串中字母序最小的k个元素\ndef remove_str(s, k):\n if not s:\n return s\n sorted_s = sorted(s)\n d = {}\n for i in range(k):\n if sorted_s[i] in d:\n d[sorted_s[i]] += 1\n else:\n d[sorted_s[i]] = 1\n res = []\n for i in range(len(s)):\n if s[i] in d.keys()and d[s[i]]!=0:\n d[s[i]] -= 1\n else:\n res.append(s[i])\n return ''.join(res)\n\n" }, { "alpha_fraction": 0.5510203838348389, "alphanum_fraction": 0.5612244606018066, "avg_line_length": 32, "blob_id": "0c5cc5e8b011435397fb6530e1cc3f9946761e14", "content_id": "291e55d214051b0a24e4d2995bc106dfaa5f9623", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 98, "license_type": "no_license", "max_line_length": 45, "num_lines": 3, "path": "/面试与笔试/笔试/wy/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "n = int(input())\na = list(map(int, input().split()))\nprint(\" \".join(map(str, [n+1-i for i in a])))" }, { "alpha_fraction": 0.36443883180618286, "alphanum_fraction": 0.37957125902175903, "avg_line_length": 20.94444465637207, "blob_id": "d8e50859f66f257eaf3a68e86200625ef9c1a0e0", "content_id": "70a13f84f0d1f7b8a82bdbd0db318551cfaf8ee4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 793, "license_type": "no_license", "max_line_length": 54, "num_lines": 36, "path": "/面试与笔试/笔试/tx2/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# #include <iostream>\n# #include <cstdio>\n# #include <vector>\n# #include <algorithm>\n#\n# using namespace std;\n#\n# int main(){\n# int t,k;\n# cin >> t >> k;\n# int max_n = 0;\n# vector<int> minlen(t, 0);\n# vector<int> maxlen(t, 0);\n# for(int i = 0; i < t; i++){\n# cin >> minlen[i];\n# cin >> maxlen[i];\n# max_n = max(max_n, maxlen[i]);\n# }\n# vector<int> dp(max_n+1, 0);\n# for(int i = 0; i <= max_n; i++){\n# if(i<k){\n# dp[i] = 1;\n# }else{\n# dp[i] = dp[i-1] + dp[i-k];\n# }\n# }\n# for(int i = 0; i<t; i++){\n# int res = 0;\n# for(int j = minlen[i]; j <= maxlen[i]; j++){\n# res += dp[j];\n# }\n# cout << res<<endl;\n# }\n# return 0;\n#\n# }\n\n\n\n" }, { "alpha_fraction": 0.5572755336761475, "alphanum_fraction": 0.5696594715118408, "avg_line_length": 22.14285659790039, "blob_id": "5d7c4815f717674cebf03ce6473af382fbdcd816", "content_id": "4672ce658a42e806963f9af24c8d68b056c57382", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 323, "license_type": "no_license", "max_line_length": 48, "num_lines": 14, "path": "/面试与笔试/笔试/ks/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nline = sys.stdin.readline().strip()\nmax_len = 0\nstart = 0\nnearest = {}\nfor i in range(len(line)):\n if line[i] not in nearest.keys():\n nearest[line[i]] = i\n else:\n start = max(nearest[line[i]] + 1, start)\n max_len = max(max_len, i-start + 1)\n nearest[line[i]] = i\n\nprint(max_len)" }, { "alpha_fraction": 0.4463007152080536, "alphanum_fraction": 0.4653937816619873, "avg_line_length": 25.25, "blob_id": "8b70d50cb6c8812b3ee11ebff1864ee98b2e4e56", "content_id": "0502151f5122f14f9d26dd780bab4f2f429e0e61", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 419, "license_type": "no_license", "max_line_length": 66, "num_lines": 16, "path": "/面试与笔试/笔试/hw/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nT = int(input())\n\nfor i in range(T):\n res = 0\n max_score = 0\n n = input()\n nums = list(map(int, sys.stdin.readline().strip().split(\" \")))\n for j in range(1, len(nums)):\n for k in range(j - 1, -1, -1):\n if nums[k] > nums[j]:\n res -= 1\n elif nums[k] < nums[j]:\n res += 1\n max_score = max(max_score, res)\n print(max_score, res)" }, { "alpha_fraction": 0.4864864945411682, "alphanum_fraction": 0.5019304752349854, "avg_line_length": 24.899999618530273, "blob_id": "bc607044f4d00f9d7aa1ff00bc18251aeae94072", "content_id": "a75e8ba760e88ec6b57d81c85e4f09dac83fca74", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 259, "license_type": "no_license", "max_line_length": 
53, "num_lines": 10, "path": "/offer/15.1Excel中列编号转换.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\nclass Solution:\n def f(self, s):\n num = 0\n for i in range(len(s)):\n num = num * 26 + ord(s[i]) - ord('A') + 1\n return num\nprint(Solution().f('A'))\nprint(Solution().f('AA'))\nprint(Solution().f('AB'))\nprint(Solution().f('Z'))" }, { "alpha_fraction": 0.37354084849357605, "alphanum_fraction": 0.3774318993091583, "avg_line_length": 21.39130401611328, "blob_id": "3a6ab55fdc6c379b0452e4dfd841020078e67cb0", "content_id": "0687054884674ba34feb7acef8dee4c32b63849b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 514, "license_type": "no_license", "max_line_length": 45, "num_lines": 23, "path": "/面试与笔试/笔试/nvidia/44.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "while True:\n try:\n input()\n b = list(input().split())\n input()\n d = list(input().split())\n res = []\n for i in b:\n res.append(d.count(i))\n count = 0\n for i in d:\n if i not in b:\n count += 1\n tmp = []\n for i in range(len(b)):\n s = b[i]+\" : \"+str(res[i])\n tmp.append(s)\n tmp.append(\"Invalid : \" + str(count))\n for i in tmp:\n print(i)\n\n except:\n break" }, { "alpha_fraction": 0.3589108884334564, "alphanum_fraction": 0.3991336524486542, "avg_line_length": 14.813725471496582, "blob_id": "7ec3aedc412c5d61e6565da6fb46df603ce9f96d", "content_id": "6f1636b6294da20d8497bdac505901047b335b4f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1616, "license_type": "no_license", "max_line_length": 63, "num_lines": 102, "path": "/面试与笔试/笔试/didi/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import copy\n\nnn = int(input())\nss = input()\ntemp = ss.split()\n\n\ndef evaluate(l):\n ss = ''.join(l)\n return eval(ss)\n\n\nfor ii in range(nn):\n for jj in range(nn - 1):\n t = copy(temp)\n if int(temp[2 * jj]) > int(temp[2 * jj + 2]):\n t[2 * jj], t[2 * jj + 2] = t[2 * jj + 2], t[2 * jj]\n if evaluate(t) == evaluate(temp):\n temp = t\n\nprint(' '.join(temp))\n\n\nimport sys\nn = int(input().strip())\ns = sys.stdin.readline().strip().split(' ')\n\ni = 0\nnum = []\nop = []\nwhile i < len(s) and i + 1 < len(s):\n num.append(s[i])\n op.append(s[i+1])\n i += 2\nnum.append(s[i])\n\nres = []\ntmp = []\npre = \"\"\nfor i in range(len(op)):\n if i == 0:\n tmp.append(num[i])\n pre = op[i]\n else:\n if op[i] == \"+\" or op[i] == \"-\":\n if pre == \"+\" or pre == \"-\":\n tmp.append(num[i])\n else:\n tmp.append(num[i])\n res += sorted(tmp, key=lambda x: int(x))\n tmp = []\n pre = op[i]\n else:\n if pre == \"*\" or pre == \"/\":\n tmp.append(num[i])\n else:\n res += sorted(tmp, key=lambda x: int(x))\n tmp = []\n tmp.append(num[i])\n\n pre = op[i]\n\ntmp.append(num[-1])\nres += sorted(tmp, key=lambda x: int(x))\nj = i = 0\nwhile i < len(op):\n res = res[:j+1] + [op[i]] + res[j+1:]\n j += 2\n i += 1\nprint(' '.join(res))\n\n\n\n\"\"\"\n6\n3 + 2 + 1 + -4 * -5 + 1\n\n\n3\n1 + 2 + 3\n\n7\n3 + 2 + 1 + -4 * -5 * 1 + 2\n\n3\n1 + 3 / 2\n\n5\n3 * 4 / 2 + 5 * 1\n\n1\n3\n\n2\n2 * 1\n\n4\n1 + 3 * 2 * 1\n\n4\n1 - 6 - 5 + 4\n\"\"\"\n\n\n\n" }, { "alpha_fraction": 0.46082088351249695, "alphanum_fraction": 0.5149253606796265, "avg_line_length": 20.440000534057617, "blob_id": "c0b2ca222763fd67524e0aaf139b6359c22548fc", "content_id": "ae042fbc8869e5a72b21239e9fbefe7cd78b68bd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 536, "license_type": "no_license", "max_line_length": 67, "num_lines": 25, "path": "/面试与笔试/笔试/tx2/22222.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nline1 = list(map(int, sys.stdin.readline().strip().split(' ')))\nt = line1[0]\nk = line1[1]\n\nstart = [0] * t\nend = [0] * t\nfor i in range(t):\n line1 = list(map(int, sys.stdin.readline().strip().split(' ')))\n start[i] = line1[0]\n end[i] = line1[1]\n\nmax_len = max(end)\ndp = [0] * (max_len + 1)\nfor i in range(max_len+1):\n if i < k:\n dp[i] = 1\n else:\n dp[i] = dp[i-1] + dp[i-k]\n\nfor i in range(t):\n res = 0\n for j in range(start[i], end[i] + 1):\n res += dp[j]\n print(res % 1000000007)\n" }, { "alpha_fraction": 0.4939576983451843, "alphanum_fraction": 0.5256797671318054, "avg_line_length": 11.50943374633789, "blob_id": "85d1182f68dd8138e885fd466675799ef3029755", "content_id": "eaab00b91d42fde3d27e18dd7f9b47f702a029a8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 700, "license_type": "no_license", "max_line_length": 43, "num_lines": 53, "path": "/面试与笔试/笔试/input/input.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\n'''\n求a+b的和,输入格式:\na b\n'''\n\n# for line in sys.stdin:\n# a = line.split('\\t')\n# print(a[0] + a[1])\n\n'''\n求a+b的和,输入格式:\na\nb\n'''\n# a = input()\n# b = input()\n# print(a + b)\n\n'''\nInput:\n3\n1 2 3\n2 1 3\n3 2 1\n\n'''\n# n = int(sys.stdin.readline().strip())\n# for i in range(n):\n# line = sys.stdin.readline().strip()\n# values = list(map(int, line.split()))\n# print(values)\n\n'''\n3\n1 2 3\n2 3\n4\n'''\n# n = int(sys.stdin.readline().strip())\n# for i in range(n):\n# line = sys.stdin.readline().strip()\n# values = list(map(int, line.split()))\n# print(values)\n\n'''\n输入一行2 3\n'''\na = input().split()\nprint(a)\n\na,b,c,d = map(int, input().split())" }, { "alpha_fraction": 0.48341232538223267, "alphanum_fraction": 0.5142180323600769, "avg_line_length": 29.071428298950195, "blob_id": "f3c8f5d2f6d3affc096ffd0736b1953025af0844", "content_id": "9925a018cff07f9487736a523a9f316640956820", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 422, "license_type": "no_license", "max_line_length": 62, "num_lines": 14, "path": "/面试与笔试/笔试/hw/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nline = list(map(int, sys.stdin.readline().strip().split(\" \")))\nN = line[0]\nM = line[1]\nval = list(map(int, sys.stdin.readline().strip().split(\" \")))\nfor i in range(M):\n line = sys.stdin.readline().strip().split(\" \")\n if line[0] == \"Q\":\n i = int(line[1]) - 1\n j = int(line[2]) - 1\n print(sum(val[i:j+1])//(j-i+1))\n elif line[0] == \"U\":\n val[int(line[1])-1] += int(line[2])\n\n" }, { "alpha_fraction": 0.4757281541824341, "alphanum_fraction": 0.4890776574611664, "avg_line_length": 26.5, "blob_id": "cd583d27949052b0d9c4a17c87fac11e214eaa3d", "content_id": "fd954d9ae6a99723116db71b0389ad3056937bc0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 824, "license_type": "no_license", "max_line_length": 55, "num_lines": 30, "path": "/面试与笔试/笔试/pdd/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nclass solution:\n def get_ring(self,s):\n current = s[0]\n s.pop(0)\n return self.dfs(current, s)\n def dfs(self, current, s_list):\n if len(s_list) == 0:\n if current[0] == current[-1]:\n 
return True\n return False\n for index, each in enumerate(s_list):\n if each[0] != current[-1]:\n continue\n s = s_list.pop(index)\n if self.dfs(current[0] + each[-1], s_list):\n return True\n s_list.append(s)\n return False\nif __name__ == '__main__':\n s = solution()\n string = sys.stdin.readline()\n string = string.split()\n string = [each[0] + each[-1] for each in string]\n res = s.get_ring(string)\n if res:\n print(\"true\")\n else:\n print(\"false\")" }, { "alpha_fraction": 0.3292011022567749, "alphanum_fraction": 0.3498622477054596, "avg_line_length": 24.034482955932617, "blob_id": "257a3680679656e43725a0f2d2213850b15e9d7e", "content_id": "e3dd399fb06b0aeedde012e18d07357935064949", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 746, "license_type": "no_license", "max_line_length": 44, "num_lines": 29, "path": "/其他/最大交替子序列和.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# 不对 有问题,还没写完\nclass Solution:\n def max_sub_sum(self, nums):\n if not nums:\n return 0\n res = [nums[0]]\n flag = True\n max_sum = nums[0]\n i = 1\n while i < len(nums):\n j = i\n if flag:\n while res[-1] - nums[j] > 0:\n j += 1\n if i == j-1:\n cur_max = nums[i]\n else:\n cur_max = max(nums[i:j])\n res.append(cur_max)\n max_sum += cur_max\n flag = not flag\n i = j\n else:\n i += 1\n continue\n\n return max_sum\n\nprint(Solution().max_sub_sum([4,3,8,5,3,8]))\n" }, { "alpha_fraction": 0.49317944049835205, "alphanum_fraction": 0.532004177570343, "avg_line_length": 26.257143020629883, "blob_id": "b4b54fdb7cfe67199c505ef3d78721721c59080e", "content_id": "9892165fbadb01aed771654ca315a5bd2967ded6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1159, "license_type": "no_license", "max_line_length": 83, "num_lines": 35, "path": "/offer/57.1和为S的连续正数序列.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n'''\n输出描述:\n输出所有和为S的连续正数序列。序列内按照从小至大的顺序,序列间按照开始数字从小到大的顺序\n\n输入一个正整数s,打印所有和为s的连续正数序列(至少包含有两个数),例如输入15,由于1+2+3+4+5 = 4+5+6 = 7+8 = 15,所以打印出三个连续序列\n1~5,4~6,7~8\n'''\nclass Solution:\n def FindContinuousSequence(self, tsum):\n # write code here\n res = []\n if tsum < 3:\n return res\n small = 1\n big = 2\n cur_sum = small + big\n while small <= (tsum + 1) // 2:\n if cur_sum == tsum:\n res.append(range(small, big+1))\n big += 1\n cur_sum += big\n elif cur_sum > tsum:\n cur_sum -= small\n small += 1\n else:\n big += 1\n cur_sum += big\n return res\n\nprint(Solution().FindContinuousSequence(15))\nprint(Solution().FindContinuousSequence(3))\nprint(Solution().FindContinuousSequence(0))\nprint(Solution().FindContinuousSequence(4))\nprint(list(range(1,3)))" }, { "alpha_fraction": 0.35971224308013916, "alphanum_fraction": 0.4190647602081299, "avg_line_length": 26.850000381469727, "blob_id": "d3cd003560d08b811611357471a2dad33a4e319f", "content_id": "0a039b855fc8ac9371a31eb17790000d80376905", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 556, "license_type": "no_license", "max_line_length": 89, "num_lines": 20, "path": "/面试与笔试/笔试/iqiyi/11.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(input().strip())\na = list(map(int, sys.stdin.readline().strip().split(' ')))\n\ndef f(s):\n n = len(s) + 1\n mod = 10**9+7\n\n dp = [[0]*1002 for i in range(1002)]\n dp[1][1] = 1\n for i in range(2,n+1):\n for j in range(1, i+1):\n if s[i-2] == \"1\":\n dp[i][j] = 
(dp[i][j-1] + (dp[i-1][i-1] - dp[i-1][j-1]) % mod) % mod\n else:\n dp[i][j] = (dp[i][j - 1] + (dp[i - 1][j - 1] - dp[i - 1][0]) % mod) % mod\n\n return (dp[n][n] + mod) % mod\n\nprint(f(''.join(str(x) for x in a)))" }, { "alpha_fraction": 0.34889349341392517, "alphanum_fraction": 0.4353388547897339, "avg_line_length": 29.4526309967041, "blob_id": "e71cd6f5245f19f382d11c50dbdf5592e72564b3", "content_id": "98775e950c808f685cd6063f7a32c223a1fd3120", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3410, "license_type": "no_license", "max_line_length": 87, "num_lines": 95, "path": "/面试与笔试/背包问题-01背包.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "#coding:utf-8\n\n\"\"\"\n参考:\nhttps://blog.csdn.net/qq_22526061/article/details/83504116\nhttps://blog.csdn.net/qq_34178562/article/details/79959380\nhttps://blog.csdn.net/na_beginning/article/details/62884939\n\"\"\"\n\"\"\"\n0-1背包问题: dp[i][j] = max(dp[ i - 1, j ], dp[ i - 1, j - w[ i ] ] + v [ i ] ) \n\"\"\"\n\ndef pack1(w, v, C): #每个东西只能选择一次\n dp = [[0 for _ in range(C+1)] for _ in range(len(w)+1)]\n for i in range(1, len(w)+1):\n for j in range(1, C+1):\n if j < w[i-1]: #如果剩余容量不够新来的物体 直接等于之前的\n dp[i][j] = dp[i-1][j]\n else:\n dp[i][j] = max(dp[i-1][j], dp[i-1][j-w[i-1]]+ v[i-1])\n return dp[len(w)][c]\n\n# 空间优化\ndef pack2(w, v, c):\n #它是先得到第一行的值,存到dp中,然后再直接用dp相当于就是上一行的值,所以下面必须用逆序\n #否则dp[j-w[i-1]]可能会用到你本行的值,从大到小就不会\n dp = [0 for _ in range(c+1)]\n for i in range(1, len(w)+1):\n for j in reversed(range(1, c+1)):#这里必须用逆序\n if w[i-1] <= j:\n dp[j] = max(dp[j], dp[j-w[i-1]]+v[i-1])\n return dp[c]\n\n# def bag(n, c, w, v):\n# \"\"\"\n# 测试数据:\n# n = 6 物品的数量,\n# c = 10 书包能承受的重量,\n# w = [2, 2, 3, 1, 5, 2] 每个物品的重量,\n# v = [2, 3, 1, 5, 4, 3] 每个物品的价值\n# \"\"\"\n# # 置零,表示初始状态\n# value = [[0 for j in range(c + 1)] for i in range(n + 1)]\n# for i in range(1, n + 1):\n# for j in range(1, c + 1):\n# value[i][j] = value[i - 1][j]\n# # 背包总容量够放当前物体,遍历前一个状态考虑是否置换\n# if j >= w[i - 1] and value[i][j] < value[i - 1][j - w[i - 1]] + v[i - 1]:\n# value[i][j] = value[i - 1][j - w[i - 1]] + v[i - 1]\n# for x in value:\n# print(x)\n# return value\n\n# def show(n, c, w, value):\n# print('最大价值为:', value[n][c])\n# x = [False for i in range(n)]\n# j = c\n# for i in range(n, 0, -1):\n# if value[i][j] > value[i - 1][j]:\n# x[i - 1] = True\n# j -= w[i - 1]\n# print('背包中所装物品为:')\n# for i in range(n):\n# if x[i]:\n# print('第', i+1, '个,', end='')\n#\n# def bag1(n, c, w, v):\n# values = [0 for i in range(c+1)]\n# for i in range(1, n + 1):\n# for j in range(c, 0, -1):\n# # 背包总容量够放当前物体,遍历前一个状态考虑是否置换\n# if j >= w[i-1]:\n# values[j] = max(values[j-w[i-1]]+v[i-1], values[j])\n# return values\n\n\nif __name__ == '__main__':\n n = 6\n c = 10\n w = [2, 2, 3, 1, 5, 2]\n v = [2, 3, 1, 5, 4, 3]\n value = bag(n, c, w, v)\n # [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n # [0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2]\n # [0, 0, 3, 3, 5, 5, 5, 5, 5, 5, 5]\n # [0, 0, 3, 3, 5, 5, 5, 6, 6, 6, 6]\n # [0, 5, 5, 8, 8, 10, 10, 10, 11, 11, 11]\n # [0, 5, 5, 8, 8, 10, 10, 10, 12, 12, 14]\n # [0, 5, 5, 8, 8, 11, 11, 13, 13, 13, 15]\n show(n, c, w, value)\n # 最大价值为: 15\n # 背包中所装物品为:\n # 第 2 个,第 4 个,第 5 个,第 6 个,\n print('\\n空间复杂度优化为N(c)结果:', bag1(n, c, w, v))\n #空间复杂度优化为N(c)结果: [0, 5, 5, 8, 8, 11, 11, 13, 13, 13, 15]" }, { "alpha_fraction": 0.3698884844779968, "alphanum_fraction": 0.4665427505970001, "avg_line_length": 27.36842155456543, "blob_id": "fc00298fb7abd9f951ba792d62a92f7451164017", "content_id": "5955ff83035cd127a82208127ff352e1bcddd3e5", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 694, "license_type": "no_license", "max_line_length": 64, "num_lines": 19, "path": "/offer/10.2变态青蛙跳台阶.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n一只青蛙一次可以跳上1级台阶,也可以跳上2级……它也可以跳上n级。求该青蛙跳上一个\nn级的台阶总共有多少种跳法。\n\"\"\"\nclass Solution:\n def jumpFloorII(self, number):\n # 只有一级台阶时:1种,两级:2种f(2)=f(1)+1, 三级:f(3)=f(2)+f(1)+1=2f(2)\n # 四级:f(4)=f(3)+f(2)+f(1)+1=2f(3)=4f(2)\n # 五级:f(5)=f(4)+f(3)+f(2)+f(1)+1=2f(4)=8f(2)\n # f(n)=2f(n-1)=2^(n-2)f(2)=2^(n-1)\n res = [1,-1]\n if number == 1:\n return res[0]\n for i in range(2, number+1):\n res[1] = 2*res[0]\n res[0] = res[1]\n return res[1]" }, { "alpha_fraction": 0.46547314524650574, "alphanum_fraction": 0.4901960790157318, "avg_line_length": 31.58333396911621, "blob_id": "11eb19780cdc92e846ab1a6c666650ce8035bbb0", "content_id": "e8cf40a3a9079f29f9e38254a16900f4a27896e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1177, "license_type": "no_license", "max_line_length": 64, "num_lines": 36, "path": "/offer/40.最小的k个数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# 不对\nclass Solution:\n def partition(self, nums, low, high):\n if not nums:\n return False\n pivot = nums[low]\n while low < high:\n while low < high and nums[high] >= pivot:\n high -= 1\n nums[low] = nums[high]\n while low < high and nums[low] <= pivot:\n low += 1\n nums[high] = nums[low]\n nums[low] = pivot\n return low\n\n def GetLeastNumbers_Solution(self, tinput, k):\n if not tinput or len(tinput) == 0 or k > len(tinput):\n return []\n start = 0\n end = len(tinput) - 1\n index = self.partition(tinput, start, end)\n while index != k - 1:\n if index < k - 1:\n start = index + 1\n index = self.partition(tinput, start, end)\n else:\n end = index - 1\n index = self.partition(tinput, start, end)\n # res = []\n # for i in range(k):\n # res.append(tinput[i])\n return tinput[0:k]\n\nprint(Solution().GetLeastNumbers_Solution([4,5,1,6,2,7,3,8], 4))\nprint(Solution().GetLeastNumbers_Solution([4,5,1,6,2,7,3,8],10))\n" }, { "alpha_fraction": 0.4429347813129425, "alphanum_fraction": 0.47010868787765503, "avg_line_length": 13.115385055541992, "blob_id": "16a83e79b8816f900d26b98a1467625543e2ab19", "content_id": "de9b2f53d794b6a77dbcc7ba97c4ed8f8336e9e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 368, "license_type": "no_license", "max_line_length": 37, "num_lines": 26, "path": "/面试与笔试/笔试/xl.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\na = sys.stdin.readline().strip()\nb = list(map(int, a.split(\",\")))\n\nif len(b) < 1:\n print(0)\n\nb.sort()\npre = b[0]\nindex = 0\n\ncount = 0\ni = 1\nwhile i < len(b):\n while i < len(b) and b[i] == pre:\n b[i] += i-index\n count += i-index\n i += 1\n\n b.sort()\n index += 1\n pre = b[index]\n i = index + 1\n # i += 1\n\nprint(count)\n\n" }, { "alpha_fraction": 0.5271084308624268, "alphanum_fraction": 0.5512048006057739, "avg_line_length": 24.538461685180664, "blob_id": "1b611cef8d14f3403cfe6c3c3042d3629701f13b", "content_id": "1c3a8bb486a21fce9eaeac148edae76199e66e04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 332, "license_type": "no_license", "max_line_length": 63, "num_lines": 13, "path": "/面试与笔试/笔试/tx2/1.py", "repo_name": "xiaomojie/NowCoder", 
"src_encoding": "UTF-8", "text": "from collections import Counter\nimport sys\n# a = [1,1,2,3]\nn = int(input())\nfor i in range(n):\n input()\n a = list(map(int, sys.stdin.readline().strip().split(' ')))\n d = Counter(a)\n res = sorted(d.items(), key=lambda x:x[1], reverse=True)\n if res[0][1] > len(a)//2:\n print(\"NO\")\n else:\n print(\"YES\")\n" }, { "alpha_fraction": 0.531383752822876, "alphanum_fraction": 0.5499286651611328, "avg_line_length": 25.41509437561035, "blob_id": "71abd0b84329d40e2b1fdcf9d78502f543977df5", "content_id": "b38fdc4b37088daf7657d77a1cc2880aabfe03fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1402, "license_type": "no_license", "max_line_length": 56, "num_lines": 53, "path": "/二叉树的遍历/层序遍历.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class BiTree:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\nclass solutionCirculation:\n def level_order_traversal(self, root):\n res = []\n if not root:\n return res\n queue = [root]\n while queue:\n p = queue.pop(0)\n res.append(p.val)\n if p.left:\n queue.append(p.left)\n if p.right:\n queue.append(p.right)\n return res\n\nclass solutionRecursion:\n def level_order_traversal(self, root):\n if not root:\n return []\n res = [[]]\n self.recursion(root, 1, res)\n return [k for item in res[:-1] for k in item]\n def recursion(self, root, level, res):\n if not root:\n return\n else:\n res[level - 1].append(root.val)\n if len(res) == level:\n res.append([])\n self.recursion(root.left, level + 1, res)\n self.recursion(root.right, level + 1, res)\n\n\nroot = BiTree(45)\na = BiTree(12)\nb = BiTree(53)\nroot.left = a\nroot.right = b\na.left = BiTree(3)\na.right = BiTree(37)\na.right.left = BiTree(24)\nb.right = BiTree(100)\nb.right.left = BiTree(61)\nb.right.left.right = BiTree(90)\nb.right.left.right.left = BiTree(78)\nprint(solutionCirculation().level_order_traversal(root))\nprint(solutionRecursion().level_order_traversal(root))\n\n\n" }, { "alpha_fraction": 0.44594594836235046, "alphanum_fraction": 0.4864864945411682, "avg_line_length": 23.66666603088379, "blob_id": "878b63cf8da17ab042a1fbdfd7d589a6620be131", "content_id": "245f74d0eb9feef7f75b66a9c9064e76e58650ab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 918, "license_type": "no_license", "max_line_length": 60, "num_lines": 27, "path": "/offer/10.斐波那契数列.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n大家都知道斐波那契数列,现在要求输入一个整数n,请你输出斐波那契数列的第n项(从0开始,第0项为0)。\nn<=39\n\"\"\"\nclass Solution:\n def Fibonacci1(self, n):\n # 法一:递归,会进行很多重复的运算,当n过大的时候速度会很慢,复杂度是\n # n的指数方式递增的,在这个问题中会time limited\n if n == 0:\n return 0\n if n == 1:\n return 1\n return self.Fibonacci(n - 1) + self.Fibonacci(n - 2)\n\n def Fibonacci(self, n):\n # 自下而上,把已经算好的结果保存起来,减少重复计算\n # 复杂度 O(n)\n res = [0, 1, -1]\n if n < 2:\n return res[n]\n for i in range(2, n + 1):\n res[2] = res[0] + res[1]\n res[0] = res[1]\n res[1] = res[2]\n return res[2]\n" }, { "alpha_fraction": 0.5259653925895691, "alphanum_fraction": 0.5432756543159485, "avg_line_length": 18.657894134521484, "blob_id": "dcfa37844e3953a27bf0b5fbee1926e18ee7af38", "content_id": "f2347e7a7d9cadc10e009a049623505125bb9c5b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 751, "license_type": "no_license", "max_line_length": 37, "num_lines": 38, "path": 
"/二叉树的遍历/判断一个树是否是二叉排序树.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class BiTree:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\nclass Solution:\n def __init__(self):\n self.flag = True\n self.last = float('-inf')\n def isBSTree(self, root):\n if root.left and self.flag:\n self.isBSTree(root.left)\n if root.val < self.last:\n self.flag = False\n self.last = root.val\n if root.right and self.flag:\n self.isBSTree(root.right)\n return self.flag\n \na = BiTree(12)\nb = BiTree(5)\nc = BiTree(18)\nd = BiTree(2)\ne = BiTree(9)\nf = BiTree(15)\ng = BiTree(19)\nh = BiTree(13)\n\na.left = b\na.right = c\nb.left = d\nb.right = e\nc.left = f\nc.right = g\nf.left = h\nm = Solution().isBSTree(a)\nprint(m)\n\n\n\n\n" }, { "alpha_fraction": 0.469696968793869, "alphanum_fraction": 0.5277777910232544, "avg_line_length": 23.6875, "blob_id": "55626f8797b2c02d535cd90ef336af092ca5e50c", "content_id": "65c18c95fc4781047e66f951715f1cea84ba90ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 396, "license_type": "no_license", "max_line_length": 64, "num_lines": 16, "path": "/面试与笔试/笔试/xm/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\ndef maxProfit(nums):\n if len(nums) <= 1:\n return 0\n res1, res2 = 0, 0\n tmp1, tmp2 = -nums[0], -nums[0]\n\n for i in nums[1:]:\n tmp1 = max(tmp1, -i)\n res1 = max(res1, tmp1 + i)\n tmp2 = max(tmp2, res1 - i)\n res2 = max(res2, tmp2 + i)\n return res2\n\n# nums = list(map(int, sys.stdin.readline().strip().split(\" \")))\n# print(maxProfit(nums))\n\n" }, { "alpha_fraction": 0.48275861144065857, "alphanum_fraction": 0.517241358757019, "avg_line_length": 19.785715103149414, "blob_id": "bbcaca619871f5735b5b87c6bab9b4e7e405392c", "content_id": "bb27554e775352dac40965f57d21a5f38384dea1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 412, "license_type": "no_license", "max_line_length": 41, "num_lines": 14, "path": "/offer/15.3判断两个整数m和n要改变多少位二进制才相等.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n输入两个整数m和n,计算需要改变m的二进制表示中的多少位才能得到n\n\"\"\"\nclass Solution:\n def f(self, m, n):\n # 可以分两步进行,第一步先对m和n取异或,第二步算出异或结果中有多少个1\n p = m ^ n\n count = 0\n for i in range(32):\n if p & (1<<i):\n count += 1\n return count\n\nprint(Solution().f(10, 13))" }, { "alpha_fraction": 0.5115681290626526, "alphanum_fraction": 0.5269922614097595, "avg_line_length": 21.882352828979492, "blob_id": "48c6402b55cbca04a5ef456ca5df27cfe95614be", "content_id": "c6ffad9b0d4b4d43c190510ddbe00f758eaf9ce5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 389, "license_type": "no_license", "max_line_length": 66, "num_lines": 17, "path": "/面试与笔试/笔试/jd/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nimport random\nT = int(input())\nfor i in range(T):\n line = list(map(int, sys.stdin.readline().strip().split(' ')))\n n = line[0]\n m = line[1]\n matrix = []\n for j in range(n):\n line = list(sys.stdin.readline().strip())\n matrix.append(line)\n x = random.randrange(0,1)\n # print(x)\n if x < 0.5:\n print(\"NO\")\n else:\n print(\"YES\")\n" }, { "alpha_fraction": 0.4538087546825409, "alphanum_fraction": 0.49108588695526123, "avg_line_length": 25.826086044311523, "blob_id": "6d2a83c0a249ed25e641620d1afb87e15e2c2686", "content_id": "5c74085a3f580e2b33be0fd1814ab4618111919e", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 763, "license_type": "no_license", "max_line_length": 44, "num_lines": 23, "path": "/offer/11.2公司员工排序.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n实现一个排序算法,要求时间效率为O(n),对公司员工的年龄进行排序(员工数量几万人,\n其实是在一个小范围内进行排序),只允许使用常量大小的辅助空间,不能超过O(n)\n\"\"\"\nclass Solution:\n def sort(self, nums):\n if not nums or len(nums) == 0:\n return nums\n count_of_age = [0] * 99\n for i in range(len(nums)):\n if 1 <= nums[i] <= 99:\n count_of_age[nums[i]] += 1\n else:\n print(\"age out of range\")\n index = 0\n for i in range(99):\n for j in range(count_of_age[i]):\n nums[index] = i\n index += 1\n\nages = [1,2,3,1,3,4,2,4,5,6,3]\nSolution().sort(ages)\nprint(ages)\n" }, { "alpha_fraction": 0.38652482628822327, "alphanum_fraction": 0.4066193997859955, "avg_line_length": 19.658536911010742, "blob_id": "9bef3692c25e1edac6ee4b8d32dfec12a29d865c", "content_id": "cb5c122d07418a19fd7107b965bcfc4a930b7a51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 846, "license_type": "no_license", "max_line_length": 67, "num_lines": 41, "path": "/面试与笔试/笔试/ks/33.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <cstdio>\n#include <vector>\n#include <string>\n\nusing namespace std;\n\nint main(){\n int n;\n cin >> n;\n int pp[n];\n int qq[n];\n for(int i=0;i<n;i++){\n scanf(\"%d\",&pp[i]);\n }\n for(int i=0;i<n;i++){\n scanf(\"%d\",&qq[i]);\n }\n int max_pp = 0;\n int result=0;\n int id, max_qq,len;\n int flag[n]={0};\n for(int i=0;i<n;i++){\n id = 0;\n max_qq = 0;\n len = 0;\n for(int j=0;j<n;j++){\n if(flag[j]==0 && max_qq<(qq[j]+2*max(0,pp[j]-max_pp))){\n id =j;\n max_qq = qq[j];\n max_qq = max_qq + 2*max(0,pp[j]-max_pp)\n len = pp[j];\n }\n }\n result = result + max_qq;\n flag[id] =1;\n max_pp = max(max_pp,len);\n cout<<result<<endl;\n }\n return 0;\n}" }, { "alpha_fraction": 0.5154061913490295, "alphanum_fraction": 0.5392156839370728, "avg_line_length": 25.44444465637207, "blob_id": "78c5fc34135546fac49b4579d8205b6987b28a60", "content_id": "68a777330be2afb472edc7a1a3fcaf30b3a485a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 714, "license_type": "no_license", "max_line_length": 90, "num_lines": 27, "path": "/面试与笔试/笔试/wyhy/aaa.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nline1 = list(map(int, sys.stdin.readline().strip().split(\" \")))\nn, m = line1[0], line1[1]\nnum1 = list(map(int, sys.stdin.readline().strip().split(\" \")))\nnum2 = list(map(int, sys.stdin.readline().strip().split(\" \")))\nnum1.sort()\nnum2.sort(reverse=True)\n\ncur_sum = m - 1\nres = []\ndiff_i = []\ndiff_j = []\nwhile cur_sum >= 0:\n if len(diff_i) == len(num1):\n break\n for i in range(len(num1)):\n for j in range(len(num2)):\n if i not in diff_i and j not in diff_j and (num1[i] + num2[j]) % m == cur_sum:\n diff_i.append(i)\n diff_j.append(j)\n res.append(cur_sum)\n\n cur_sum -= 1\n # print res\n\nprint(' '.join([str(i) for i in res]))\n" }, { "alpha_fraction": 0.4887307286262512, "alphanum_fraction": 0.536180317401886, "avg_line_length": 21.783782958984375, "blob_id": "b3c275b0a0d532be991da4c704e613b5178d47ec", "content_id": "90b980ae3d683abc3e6d28e0190bd759312ea137", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 865, "license_type": 
"no_license", "max_line_length": 42, "num_lines": 37, "path": "/二叉树的遍历/深度优先遍历.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# 其实就是前序遍历\nclass BiTree:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\nclass Solution:\n def deep_traversal(self, root):\n # 迭代法\n res = []\n if not root:\n return res\n\n stack = [root]\n while len(stack):\n p = stack.pop()\n res.append(p.val)\n if p.right:\n stack.append(p.right)\n if p.left:\n stack.append(p.left)\n return res\n\nroot = BiTree(45)\na = BiTree(12)\nb = BiTree(53)\nroot.left = a\nroot.right = b\na.left = BiTree(3)\na.right = BiTree(37)\na.right.left = BiTree(24)\nb.right = BiTree(100)\nb.right.left = BiTree(61)\nb.right.left.right = BiTree(90)\nb.right.left.right.left = BiTree(78)\nprint(Solution().deep_traversal(root))\n# [45, 12, 3, 37, 24, 53, 100, 61, 90, 78]\n" }, { "alpha_fraction": 0.3817427456378937, "alphanum_fraction": 0.43015214800834656, "avg_line_length": 30.478260040283203, "blob_id": "98027ef2d1e31b026dbfbe9941d8bc349c2b3952", "content_id": "9bc802edf6c1a6c0a6cd5ee54a21ab339ddbb7ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 773, "license_type": "no_license", "max_line_length": 66, "num_lines": 23, "path": "/最长公共(子)/最长公共子串.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n动态规划:\ndp[i][j]表示以str1[i]和str2[j]为结尾的公共子串的长度\n转移方程:\n 0 if i =0 or j = 0\ndp[i][j] = dp[i-1][j-1] + 1 if str1[i] = str2[j]\n 0 if str1[i] != str2[j]\n\"\"\"\n\nclass Solution:\n def longest_substring(self, str1, str2):\n dp = [[0] * (len(str2) + 1) for i in range(len(str1) + 1)]\n result = 0\n for i in range(1, len(str1)+1):\n for j in range(1, len(str2)+1):\n if str1[i-1] == str2[j-1]:\n dp[i][j] = dp[i-1][j-1] + 1\n result = max(dp[i][j], result)\n else:\n dp[i][j] = 0\n return result\n\nprint(Solution().longest_substring('abcdefg', 'xyzabcd'))" }, { "alpha_fraction": 0.5693622827529907, "alphanum_fraction": 0.5923734307289124, "avg_line_length": 27.185184478759766, "blob_id": "9b208c284476df225d4f3df84bf511b05cc4c632", "content_id": "577e750ad758a339f4d07792fb5202589c455df7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1587, "license_type": "no_license", "max_line_length": 82, "num_lines": 54, "path": "/面试与笔试/二叉搜索树的删除.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n给定一颗二叉搜索树(不是二叉平衡树),删除其中一个给定的节点\n\"\"\"\nclass TreeNode:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n \nclass Solution:\n # 递归法\n def deleteNodeRecursion(self, root, key):\n if not root:\n return None\n if root.val > key:\n root.left = self.deleteNodeRecursion(root.left, key)\n elif root.val < key:\n root.right = self.deleteNodeRecursion(root.right, key)\n else:\n if not root.left or not root.right:\n root = root.left if root.left else root.right\n else:\n cur = root.right\n while cur.left:\n cur = cur.left\n root.val = cur.val\n root.right = self.deleteNodeRecursion(root.right, cur.val)\n return root\n\n def traversal(self, root):\n if not root:\n return []\n return [root.val] + self.traversal(root.left) + self.traversal(root.right)\n\nroot = TreeNode(45)\na = TreeNode(12)\nb = TreeNode(53)\nroot.left = a\nroot.right = b\na.left = TreeNode(3)\na.right = TreeNode(37)\na.right.left = TreeNode(24)\nb.right = TreeNode(100)\nb.right.left = TreeNode(61)\nb.right.left.right = TreeNode(90)\nb.right.left.right.left = 
TreeNode(78)\ns = Solution()\nprint(s.traversal(root))\n# t1 = s.deleteNodeRecursion(root, 45)\n# print(s.traversal(t1))\n# t1 = s.deleteNodeRecursion(root, 53)\n# t1 = s.deleteNodeRecursion(root, 100)\nt1 = s.deleteNodeRecursion(root, 12)\nprint(s.traversal(t1))" }, { "alpha_fraction": 0.5603996515274048, "alphanum_fraction": 0.5694822669029236, "avg_line_length": 28.756755828857422, "blob_id": "25e516ef45b90ef9bffcf664f013a624fbd266a6", "content_id": "89cdda3162b2866b4f1a89c316bd627e350fbdda", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1101, "license_type": "no_license", "max_line_length": 88, "num_lines": 37, "path": "/查找/二叉排序树的生成.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class BiTree:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\n\nclass Solution:\n def searchBST(self, root, node, parent):\n if not root:\n return False, parent\n if root.val == node:\n return True, None\n elif root.val > node:\n return self.searchBST(root.left, node, root)\n else:\n return self.searchBST(root.right, node, root)\n\n def insertBST(self, root, node):\n parent = None\n flag, parent = self.searchBST(root, node, parent)\n if not flag:\n if not parent:\n root = BiTree(node)\n elif root.val > node:\n parent.left = BiTree(node)\n else:\n parent.right = BiTree(node)\n return root\n\nroot = None\nroot = Solution().insertBST(root, 45)\nroot = Solution().insertBST(root, 24)\nroot = Solution().insertBST(root, 53)\nroot = Solution().insertBST(root, 12)\nroot = Solution().insertBST(root, 90)\nprint(root.val, root.left.val, root.right.val, root.left.left.val, root.right.right.val)\n" }, { "alpha_fraction": 0.5539929866790771, "alphanum_fraction": 0.5600200891494751, "avg_line_length": 22.678571701049805, "blob_id": "4dbca63063758eb9a790dafa1ffff3fae68008e3", "content_id": "7117bd926d46d1b16ad52d74603cd8cc43ccc0c0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2441, "license_type": "no_license", "max_line_length": 64, "num_lines": 84, "path": "/offer/35.复杂链表的复制.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\nclass RandomListNode:\n def __init__(self, x):\n self.label = x\n self.next = None\n self.random = None\n\n\nclass Solution:\n # 返回 RandomListNode\n # 法一:先复制链表,然后再对每一个节点设置random指针,由于random指针指向的位置不确定,所以得\n # 从原始链表的头部开始,走s部,到达所指向的节点,那么在新链表中也走s步,time:O(N^2),space:O(1)\n\n # 法二:先复制链表,但复制每个节点的同时把random信息存到一个hash表中,有了hash表可以再O(1)的时\n # 间找到random,time:O(n) spacd:O(n)\n\n # 法三:不用辅助空间,复制节点的时候直接把节点放在被复制的节点后面,这样新节点的random可以通过\n # 原来节点找到\n\n def Clone(self, pHead):\n # write code here\n if not pHead:\n return pHead\n cur = pHead\n # 先复制链表,并把新节点放在原节点后面\n while cur:\n node = RandomListNode(str(cur.label) + \"*\")\n node.next = cur.next\n cur.next = node\n cur = cur.next.next\n # 再遍历一次,设置random指针\n cur = pHead\n while cur:\n random = cur.random\n if random:\n cur.next.random = random.next\n cur = cur.next.next\n # 将新链表抽出来\n head = cur = pHead.next\n pHead.next = cur.next\n old = pHead.next\n while old:\n cur.next = old.next\n cur = cur.next\n old.next = cur.next\n old = old.next\n return head\n\n# 测试:\n# 1. 空\n# 2. 只有一个节点\n# 3. random指向节点自身\n# 4. 
两个节点的random形成环状\n\nprint(Solution().Clone(None))\nprint(\"______________\")\n\na = RandomListNode(1)\na.random = a\nclone_a = Solution().Clone(a)\nwhile clone_a:\n print(clone_a.label, clone_a.next, clone_a.random.label)\n clone_a = clone_a.next\nprint(\"______________\")\n\na = RandomListNode(1)\nb = RandomListNode(2)\nc = RandomListNode(3)\na.next = b\na.random = c\nb.next = c\nb.random = b\nc.random = a\ncur = a\n# while cur:\n# print(cur.label, cur.next, cur.random.label)\n# cur = cur.next\n# print(\"______________\")\n\nclone_a = clone = Solution().Clone(a)\nwhile clone_a:\n print(clone_a.label, clone_a.next, clone_a.random.label)\n clone_a = clone_a.next\nprint(\"______________\")\n\n\n" }, { "alpha_fraction": 0.49831271171569824, "alphanum_fraction": 0.5151855945587158, "avg_line_length": 28.633333206176758, "blob_id": "f4424a1d14b3781cb596273eea9c4b92fd6cef26", "content_id": "39c4c49d1930c2013fc49750ca6364d3889799a8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 911, "license_type": "no_license", "max_line_length": 71, "num_lines": 30, "path": "/面试与笔试/笔试/xm/11.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n股票买卖两次的最大获利\n\"\"\"\nimport sys\nclass Solution:\n def max_profit(self, prices):\n minprice = float('inf')\n maxprice = float('-inf')\n maxprofit1 = [0]*len(prices)\n maxprofit2 = [0]*len(prices)\n for i in range(len(prices)):\n if prices[i] < minprice:\n minprice = prices[i]\n else:\n maxprofit1[i] = prices[i] - minprice\n\n for i in range(len(prices)-1, -1, -1):\n if prices[i] > maxprice:\n maxprice = prices[i]\n else:\n maxprofit2[i] = maxprice - prices[i]\n\n res = 0\n for i in range(len(prices)):\n # print(maxprofit1[i],maxprofit2[i] )\n res = max(res, max(maxprofit1[:i+1]) + max(maxprofit2[i:]))\n return res\n\nnums = list(map(int, sys.stdin.readline().strip().split(\" \")))\nprint(Solution().max_profit(nums))\n" }, { "alpha_fraction": 0.32413792610168457, "alphanum_fraction": 0.36379310488700867, "avg_line_length": 25.409090042114258, "blob_id": "b9e8b83bc7be267d435cb5cb6ab310e142cfa98c", "content_id": "a200174d59e071fa353f97059aa8f64afa7d18eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 580, "license_type": "no_license", "max_line_length": 61, "num_lines": 22, "path": "/面试与笔试/笔试/zhaohang/11.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\ns = list(sys.stdin.readline().strip())\ncount = [1]*len(s)\n\ni = 0\nwhile i < len(s):\n if i < len(s) - 1 and s[i] == 'R' and s[i+1] == s[i]: # 0\n count[i] = 0\n i += 1\n elif i > 0 and s[i] == 'L' and s[i] == s[i-1]: # -1\n count[i] = 0\n i += 1\n elif i < len(s) - 1 and s[i] == 'R' and s[i+1] == 'L':\n j = i-1\n while j >= 0 and s[j] == 'R':\n if (i-j)%2 == 0:\n count[i] += 1\n else:\n count[i+1] += 1\n j -= 1\n i += 2\nprint(' '.join([str(x) for x in count]))" }, { "alpha_fraction": 0.4788494110107422, "alphanum_fraction": 0.5279187560081482, "avg_line_length": 24.7391300201416, "blob_id": "39564755c1bf716a2bf5f7b648210ba5d64e5b4b", "content_id": "b3904b122efe404c580fd446dc58ad1367a4d179", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 895, "license_type": "no_license", "max_line_length": 44, "num_lines": 23, "path": "/offer/10.1青蛙跳台阶问题.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 
-*-\n\"\"\"\n题目描述\n一只青蛙一次可以跳上1级台阶,也可以跳上2级。求该青蛙跳上一个n级的台阶总共有多少\n种跳法(先后次序不同算不同的结果)。\n\"\"\"\nclass Solution:\n def jumpFloor(self, number):\n # 是斐波那契数列问题的变种,如果只有一级台阶,1种跳法,2级,两种跳法\n # 当n>2时,第一次跳有两种跳法:\n # 1.跳一级,此时跳法数目为剩下的n-1级台阶跳法数目\n # 2.跳两级,此时跳法数目为剩下的n-2级台阶跳法数目\n # f(n) = f(n-1) + f(n-2)\n res = [1,2,-1]\n if number <= 2:\n return res[number-1]\n for i in range(3, number+1):\n res[2] = res[0] + res[1]\n res[0] = res[1]\n res[1] = res[2]\n return res[2]\n\n # 类似题:leetcode上91_Decode_Ways" }, { "alpha_fraction": 0.3839050233364105, "alphanum_fraction": 0.4168865382671356, "avg_line_length": 17.975000381469727, "blob_id": "956a70ccdc6c3983f719cca132deca38f3e9de61", "content_id": "0f6a0d0af0da1a20d2ae40a4f98b6d0005b9738e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 758, "license_type": "no_license", "max_line_length": 48, "num_lines": 40, "path": "/面试与笔试/笔试/tx2/222.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <cstdio>\n#include <vector>\n#include <algorithm>\n\nusing namespace std;\n\nint main(){\n int t,k;\n cin >> t >> k;\n vector<int> left(t,0);\n vector<int> right(t,0);\n\n int len = 0;\n int m = 0;\n for(int i = 0; i < t; i++){\n cin >> left[i];\n cin >> right[i];\n len = max(len, right[i]);\n }\n int index = 0;\n int n = len+1;\n vector<int> dp(n,0);\n\n for(int i=0;i<=len;i++){\n if(i >= k){\n dp[i]=dp[i-1]+dp[i-k];\n }else{\n dp[i]=1;\n }\n }\n for(int i = 0; i < t; i++){\n int res = 0;\n for(int j = left[i];j <= right[i];j++ ){\n res += dp[j];\n }\n cout << res % (1000000000+7) << endl;\n }\n return 0;\n}" }, { "alpha_fraction": 0.5087633728981018, "alphanum_fraction": 0.5116845369338989, "avg_line_length": 23.129411697387695, "blob_id": "ebaa5fffbeacd147b907762c252d29aada808bfe", "content_id": "5a95a6a07f607dcfbd6f8e3aa38febb8c4ddf291", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2234, "license_type": "no_license", "max_line_length": 61, "num_lines": 85, "path": "/面试与笔试/复杂链表的复制.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class LinkNode:\n def __init__(self, x):\n self.next = None\n self.jump = None\n self.val = x\n\nclass Solution:\n # 法三:不用辅助空间,复制节点的时候直接把节点放在被复制的节点后面,这样新节点的random可以通过\n # 原来节点找到\n def clone(head):\n if not head:\n return head\n cur = head\n while cur:\n node = LinkNode(str(cur.val) + \"'\")\n node.next = cur.next\n cur.next = node\n cur = cur.next.next\n cur = head\n while cur:\n jump = cur.jump\n if jump:\n cur.next.jump = jump.next\n cur = cur.next.next\n\n new_head = cur = head.next\n old = head\n old.next = new_head.next\n old = old.next.next\n while old:\n cur.next = old.next\n cur = cur.next\n old.next = cur.next\n old = old.next\n return new_head\n\n # 法二:先复制链表,但复制每个节点的同时把random信息存到一个hash表中,有了hash表可以再O(1)的时\n # 间找到random,time:O(n) spacd:O(n)\n def clone2(head):\n if not head:\n return head\n\n map = dict()\n cur = head\n while cur:\n node = LinkNode(str(cur.val) + \"'\")\n map[cur] = node\n cur = cur.next\n node = head\n while node:\n map.get(node).next = map.get(node.next)\n map.get(node).jump = map.get(node.jump)\n node = node.next\n return map.get(head)\n\nprint(Solution().Clone(None))\nprint(\"______________\")\n\na = LinkNode(1)\na.random = a\nclone_a = Solution().Clone(a)\nwhile clone_a:\n print(clone_a.val, clone_a.next, clone_a.random.val)\n clone_a = clone_a.next\nprint(\"______________\")\n\na = LinkNode(1)\nb = LinkNode(2)\nc = 
LinkNode(3)\na.next = b\na.random = c\nb.next = c\nb.random = b\nc.random = a\n# cur = a\n# while cur:\n# print(cur.label, cur.next, cur.random.label)\n# cur = cur.next\n# print(\"______________\")\n\nclone_a = clone = Solution().Clone(a)\nwhile clone_a:\n print(clone_a.label, clone_a.next, clone_a.random.label)\n clone_a = clone_a.next\nprint(\"______________\")\n\n\n\n" }, { "alpha_fraction": 0.4881439208984375, "alphanum_fraction": 0.5151267647743225, "avg_line_length": 31.1842098236084, "blob_id": "aae8263dba76075893b7432b1cdb96d466d0cab8", "content_id": "e361de7cc1b5ee74411a93069a78ea23964870af", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1223, "license_type": "no_license", "max_line_length": 57, "num_lines": 38, "path": "/面试与笔试/笔试/wy/4.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class Solution:\n def __init__(self):\n self.result = \"NO\"\n\n def reverse(self, L):\n res = L[::-1]\n res_copy = res.copy()\n while res[0] == 0:\n del res[0]\n return res_copy, res\n\n def judge(self, target, index):\n if index == len(target):\n self.result = \"YES\"\n if target[index: index + len(L0)] == L0:\n self.judge(target, index + len(L0))\n if target[index:index + len(L1)] == L1:\n self.judge(target, index + len(L1))\n if target[index:index + len(L0_res)] == L0_res:\n self.judge(target, index + len(L0_res))\n if target[index:index + len(L1_res)] == L1_res:\n self.judge(target, index + len(L1_res))\n if target[index:index + len(L0_copy)] == L0_copy:\n self.judge(target, index + len(L0_copy))\n if target[index:index + len(L1_copy)] == L1_copy:\n self.judge(target, index + len(L1_copy))\n\n\nif __name__ == '__main__':\n s = Solution()\n L0 = [int(n) for n in input()]\n L1 = [int(n) for n in input()]\n L2 = [int(n) for n in input()]\n L0_copy, L0_res = s.reverse(L0)\n L1_copy, L1_res = s.reverse(L1)\n\n s.judge(L2, 0)\n print(s.result)\n" }, { "alpha_fraction": 0.5649038553237915, "alphanum_fraction": 0.5769230723381042, "avg_line_length": 28.785715103149414, "blob_id": "1497f93d7534a0cd68eb645f8a3a29c8849f8f04", "content_id": "ec68c15e43617dcc18b3e748774baa25d01929f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 416, "license_type": "no_license", "max_line_length": 67, "num_lines": 14, "path": "/面试与笔试/笔试/tx/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(input())\nfor k in range(n):\n line1 = list(map(int, sys.stdin.readline().strip().split(' ')))\n n = line1[0]\n # m = line1[1]\n matrix = []\n for i in range(n):\n matrix.append(sys.stdin.readline().strip())\n print(matrix)\n start = list(map(int, sys.stdin.readline().strip().split(' ')))\n end = list(map(int, sys.stdin.readline().strip().split(' ')))\n\n print(start, end)" }, { "alpha_fraction": 0.5161054134368896, "alphanum_fraction": 0.5534406900405884, "avg_line_length": 35.945945739746094, "blob_id": "28c56724069e51703c75db9aaf35617540bb7fb1", "content_id": "13a594ec25136e9a08b04f9b43dc60c56dacf029", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1634, "license_type": "no_license", "max_line_length": 117, "num_lines": 37, "path": "/offer/13.机器人的运动范围.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n地上有一个m行和n列的方格。一个机器人从坐标0,0的格子开始移动,每一次只能向左,右,上,下四个方向移动一格,但是\n不能进入行坐标和列坐标的数位之和大于k的格子。 例如,当k为18时,机器人能够进入方格(35,37),因为3+5+3+7 = 
18。\n但是,它不能进入方格(35,38),因为3+5+3+8 = 19。请问该机器人能够达到多少个格子?\n\"\"\"\nclass Solution:\n def movingCount(self, threshold, rows, cols):\n # 回溯\n if rows < 1 or cols < 1:\n return 0\n visited = [0] * (rows * cols)\n count = self.moving(threshold, rows, cols, 0, 0, visited)\n return count\n\n def moving(self, threshold, rows, cols, row, col, visited):\n count = 0\n if 0 <= row < rows and 0 <= col < cols and not visited[row * cols + col] and self.check(threshold, row, col):\n visited[row * cols + col] = True\n count = 1 + self.moving(threshold, rows, cols, row, col - 1, visited) + \\\n self.moving(threshold, rows, cols, row, col + 1, visited) + \\\n self.moving(threshold, rows, cols, row - 1, col, visited) + \\\n self.moving(threshold, rows, cols, row + 1, col, visited)\n return count\n\n def check(self, threshold, row, col):\n index_sum = 0\n while row:\n index_sum += row % 10\n row = row // 10\n while col:\n index_sum += col % 10\n col = col // 10\n return index_sum <= threshold\n\nprint(Solution().movingCount(1,2,2))" }, { "alpha_fraction": 0.3228663504123688, "alphanum_fraction": 0.3743961453437805, "avg_line_length": 13.92771053314209, "blob_id": "5cabe2c23004a4910eeae75f2d1ad35a83e27810", "content_id": "6f00fccdaa164ac5277ee4b6523a72614f3f6e11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1242, "license_type": "no_license", "max_line_length": 56, "num_lines": 83, "path": "/面试与笔试/笔试/didi/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(input().strip())\ns = sys.stdin.readline().strip().split(' ')\n\ni = 0\nnum = []\nop = []\nwhile i < len(s) and i + 1 < len(s):\n num.append(s[i])\n op.append(s[i+1])\n i += 2\nnum.append(s[i])\n\nres = []\ntmp = []\npre = \"\"\nfor i in range(len(op)):\n if i == 0:\n tmp.append(num[i])\n pre = op[i]\n else:\n if op[i] == \"+\" or op[i] == \"-\":\n if pre == \"+\" or pre == \"-\":\n tmp.append(num[i])\n else:\n tmp.append(num[i])\n res += sorted(tmp, key=lambda x: int(x))\n tmp = []\n pre = op[i]\n else:\n if pre == \"*\" or pre == \"/\":\n tmp.append(num[i])\n else:\n res += sorted(tmp, key=lambda x: int(x))\n tmp = []\n tmp.append(num[i])\n\n pre = op[i]\n\ntmp.append(num[-1])\nres += sorted(tmp, key=lambda x: int(x))\n\nj = i = 0\nwhile i < len(op):\n res = res[:j+1] + [op[i]] + res[j+1:]\n j += 2\n i += 1\nprint(' '.join(res))\n\n\n\n\"\"\"\n6\n3 + 2 + 1 + -4 * -5 + 1\n\n\n3\n1 + 2 + 3\n\n7\n3 + 2 + 1 + -4 * -5 * 1 + 2\n\n8\n3 + 2 + 1 + -4 * -5 * 1 + 2 + 1\n\n3\n1 + 3 / 2\n\n5\n3 * 4 / 2 + 5 * 1\n\n1\n3\n\n2\n2 * 1\n\n4\n1 + 3 * 2 * 1\n\n4\n1 - 6 - 5 + 4\n\"\"\"\n\n\n\n" }, { "alpha_fraction": 0.46327683329582214, "alphanum_fraction": 0.4755178987979889, "avg_line_length": 24.214284896850586, "blob_id": "3106df0ec0a6d2db72ed058e07630b20652f7864", "content_id": "7692c99b6178cc0574de5fec2bbd53c598ee5438", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1312, "license_type": "no_license", "max_line_length": 59, "num_lines": 42, "path": "/offer/32.1把二叉树打印成多行(分行从上到下打印二叉树).py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n从上到下按层打印二叉树,同一层结点从左至右输出。每一层输出一行。\n\"\"\"\n# class TreeNode:\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n\nclass Solution:\n # 返回二维列表[[1,2],[4,5]]\n def Print(self, pRoot):\n # write code here\n if not pRoot:\n return []\n res = []\n queue = [pRoot]\n cur_level = 1\n next_level = 0\n 
row = []\n while queue:\n node = queue.pop(0)\n row.append(node.val)\n cur_level -= 1\n if node.left:\n queue.append(node.left)\n next_level += 1\n if node.right:\n queue.append(node.right)\n next_level += 1\n if cur_level == 0:\n res.append(row)\n cur_level = next_level\n next_level = 0\n row = []\n return res\n\n# 另外一种方法,但是其实不可取,没有真正的把queue当成队列,还是当成list,就是先根据index\n# 遍历queue把其中的元素都加入row,并记录此时的长度n,即为这一层的个数,然后res.append(row),\n# 然后再弹出n个,同时加入下一层。\n\n\n\n" }, { "alpha_fraction": 0.43579766154289246, "alphanum_fraction": 0.46108949184417725, "avg_line_length": 20.375, "blob_id": "8a4de408efc7764782b43cde953b776e33006ed2", "content_id": "8eabf4e9a863fe6d66da4955090971982b1281e4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 514, "license_type": "no_license", "max_line_length": 62, "num_lines": 24, "path": "/面试与笔试/笔试/360-2/22.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nline = list(map(int, sys.stdin.readline().strip().split(\" \")))\nN = line[0]\nM = line[1]\nline = list(map(int, sys.stdin.readline().strip().split(\" \")))\n\n# i = 0\n# j = M\n# res = sum(line[i:j]) // M\nres = 0\n# while i < N-M + 1:\n# j = i + M\n# while j < N:\n# if sum(line[i:j]) / (j-i) > res:\n# res = sum(line[i:j]) / (j-i)\n# j += 1\n# i += 1\nfor i in range(0, N-M+1):\n for j in range(i+M-1, N):\n res = max(res, sum(line[i:j+1])/(j-i+1))\n\n\nprint(\"%.3f\" %res)\n\n" }, { "alpha_fraction": 0.5053571462631226, "alphanum_fraction": 0.5249999761581421, "avg_line_length": 22.33333396911621, "blob_id": "a2fef44f8d64158f0e3bed9df25295121b16860e", "content_id": "d585ed5426560f7d9e85ff219c7b95c148c2fbbb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 560, "license_type": "no_license", "max_line_length": 62, "num_lines": 24, "path": "/面试与笔试/笔试/hulu/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nline = list(map(int, sys.stdin.readline().strip().split(' ')))\nn = line[0]\nm = line[1]\na = list(map(int, sys.stdin.readline().strip().split(' ')))\nw = list(map(int, sys.stdin.readline().strip().split(' ')))\n\ndef last_remain(n, m):\n if n < 1 or m < 1:\n return -1\n last = 0\n for i in range(2, n+1):\n last = (last + m) % i\n return last\nlast = last_remain(n, m)\n\nprob = 0\nsum_w = sum(w)\nfor j in range(n):\n for i in range(n):\n if a[i] == 1 and ((i-j)+n)%n == last:\n prob += w[j]/sum_w\nprint('%.5f' %prob)\n" }, { "alpha_fraction": 0.4925714433193207, "alphanum_fraction": 0.524571418762207, "avg_line_length": 26.375, "blob_id": "c5f213e26a4e54ad88eb2d18351d208968ff5fa1", "content_id": "356516e0d93561dd74d4782ba2ff2727807c7e9b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1019, "license_type": "no_license", "max_line_length": 70, "num_lines": 32, "path": "/offer/17.打印从1到最大的n位数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n输入数字n,按顺序打印出从1到最大的n位十进制数,比如输入3,则打印出1,2,3一直到最大的3位数999\n\"\"\"\nclass Solution:\n def pring_1_to_max_of_digits1(self, n):\n # 法一:直接打印,可能会遇到大数问题,当n很大的时候\n if n <= 0:\n return\n num = 1\n for i in range(n):\n num *= 10\n for i in range(num):\n print(i)\n def pring_1_to_max_of_digits(self, n):\n # 法二:数字排列解法\n if n <= 0:\n return\n num = [0]*n\n self.pring_1_to_max_of_digits_recursively(num, n, 0)\n\n def pring_1_to_max_of_digits_recursively(self, num, n, index):\n if index == n:\n self.print_num(num)\n return\n for 
i in range(10):\n num[index] = str(i)\n self.pring_1_to_max_of_digits_recursively(num, n, index+1)\n\n def print_num(self, num):\n print(''.join(num).lstrip('0'))\n\nSolution().pring_1_to_max_of_digits(3)" }, { "alpha_fraction": 0.3776104152202606, "alphanum_fraction": 0.44060254096984863, "avg_line_length": 31.065933227539062, "blob_id": "399bb22a18ecb9bf665f28ecd95e40884927cecb", "content_id": "6301a31d111d296410aa54731f3dbd9996bb2544", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3089, "license_type": "no_license", "max_line_length": 142, "num_lines": 91, "path": "/查找/矩阵中1的块数-DFS-BFS.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n给一个m * n的01矩阵,如果矩阵中若干个1是相邻的,那么称这些1是一个相邻的“块”。求给定矩阵中块的个数\n输入样例(8 * 7的01矩阵):\n0 1 1 1 0 0 1\n0 0 1 0 0 0 0\n0 0 0 0 1 0 0\n0 0 0 1 1 1 0\n1 1 1 0 1 0 0\n1 1 1 1 0 0 0\n0 0 0 0 1 0 1\n1 0 1 1 0 1 0\n该矩阵中“块”的个数就为:9\nhttps://blog.csdn.net/y_dd6011/article/details/89715169\n\"\"\"\nfrom queue import Queue\nx = [0, 0, -1, 1]\ny = [1, -1, 0, 0]\ndef bsf(matrix):\n visited = [[False]*len(matrix[0]) for _ in range(len(matrix))]\n res = 0\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if not visited[i][j] and matrix[i][j] == 1:\n res += 1\n queue = Queue()\n queue.put([i,j])\n visited[i][j] = True\n while not queue.empty():\n index = queue.get()\n visited[index[0]][index[1]] = True\n for k in range(0,4):\n tmp_x = index[0] + x[k]\n tmp_y = index[1] + y[k]\n if 0 <= tmp_x < len(matrix) and 0 <= tmp_y < len(matrix[0]) and not visited[tmp_x][tmp_y] and matrix[tmp_x][tmp_y]==1:\n visited[tmp_x][tmp_y] = True\n queue.put([tmp_x, tmp_y])\n return res\n\n# 深度优先法1\ndef _dfs(matrix, vis, i, j):\n vis[i][j] = True\n for k in range(0, 4):\n tmp_x = i + x[k]\n tmp_y = j + y[k]\n if 0 <= tmp_x < len(matrix) and 0 <= tmp_y < len(matrix[0]) and not vis[tmp_x][tmp_y] and matrix[tmp_x][\n tmp_y]==1:\n _dfs(matrix, vis, tmp_x, tmp_y)\ndef dfs(matrix):\n vis = [[False]*len(matrix[0]) for _ in range(len(matrix))]\n res = 0\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if not vis[i][j] and matrix[i][j] == 1:\n res += 1\n _dfs(matrix,vis, i, j)\n return res\n\n# 深度优先法2, 不太对,不是dfs\ndef dfs2(matrix):\n vis = [[False] * len(matrix[0]) for _ in range(len(matrix))]\n res = 0\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if not vis[i][j] and matrix[i][j] == 1:\n res += 1\n stack = [[i,j]]\n vis[i][j] = True\n while stack:\n index = stack.pop()\n for k in range(0, 4):\n tmp_x = index[0] + x[k]\n tmp_y = index[1] + y[k]\n if 0 <= tmp_x < len(matrix) and 0 <= tmp_y < len(matrix[0]) and not vis[tmp_x][tmp_y] and matrix[tmp_x][tmp_y] == 1:\n vis[tmp_x][tmp_y] = True\n stack.append([tmp_x, tmp_y])\n return res\n\n\n\n\nmatrix = [[0, 1, 1, 1, 0, 0, 1],\n[0, 0, 1, 0, 0, 0, 0],\n[0, 0, 0, 0, 1, 0, 0],\n[0, 0, 0, 1, 1, 1, 0],\n[1, 1, 1, 0, 1, 0, 0],\n[1, 1, 1, 1, 0, 0, 0],\n[0, 0, 0, 0, 1, 0, 1],\n[1, 0, 1, 1, 0, 1, 0]]\nprint(bsf(matrix))\nprint(dfs(matrix))\nprint(dfs2(matrix))\n\n\n\n" }, { "alpha_fraction": 0.4810495674610138, "alphanum_fraction": 0.5189504623413086, "avg_line_length": 24.121952056884766, "blob_id": "5732891acecd40335a2ee84f11d44e1d94be317b", "content_id": "b03fd744e17306904ad7020edd011f07a33953f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1271, "license_type": "no_license", "max_line_length": 63, "num_lines": 41, "path": "/offer/56.1数组中唯一只出现一次的数字.py", "repo_name": 
"xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n'''\n题目描述\n在一个数组中除一个数字只出现一次外,其他都出现了三次,找出这个只出现一次的\n'''\n\n\n# 负数会出问题\nclass Solution:\n # 如果一个数字出现三次,那么它的二进制表示的每一位(0或1)也出现三次,如果把所有出现三次的数字的二进制表示的每一位\n # 都分别加起来,那么每一位的和都能被3整除\n def FindNumsAppearOnce(self, array):\n if not array:\n return None\n\n bit_sum = [0] * 32\n for i in range(len(array)):\n tmp = array[i]\n index = 0\n while tmp:\n if tmp & 1:\n bit_sum[index] += 1\n tmp = tmp >> 1\n index += 1\n\n res = 0\n # print(bit_sum)\n for i in range(len(bit_sum)-1, -1, -1):\n # res = res << 1\n # res += bit_sum[i] % 3\n res = (res << 1) + bit_sum[i] % 3\n # print(res)\n\n\n return res\n\nprint(Solution().FindNumsAppearOnce([1,2,3,1,1,2,2,4,4,4]))\nprint(Solution().FindNumsAppearOnce([]))\nprint(Solution().FindNumsAppearOnce([1]))\nprint(Solution().FindNumsAppearOnce([0,1,1,1]))\nprint(Solution().FindNumsAppearOnce([-1,1,1,1]))" }, { "alpha_fraction": 0.37051793932914734, "alphanum_fraction": 0.4003984034061432, "avg_line_length": 18.346153259277344, "blob_id": "973059565dc1bc255f6fbf62ad40b5895a71353c", "content_id": "e5c4168db654a86c899782cb3d74ee1ccdbb672b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 502, "license_type": "no_license", "max_line_length": 75, "num_lines": 26, "path": "/面试与笔试/笔试/iqiyi/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nclass Solution:\n def f(self, g,n,m):\n if n < 0.9:\n return 0\n out = 0.0\n out += g*(n/(n+m))\n g = g*(m/(n+m))\n m -= 1\n\n if m < 0.9:\n return out\n g = g * (m / (n + m))\n m -= 1\n out = out + self.f(g, n, m-1)*(m/(n+m))+self.f(g, n-1, m)*(n/(n+m))\n return out\n\na = list(map(int, sys.stdin.readline().strip().split(' ')))\nn = a[0]\nred = n\nm = a[1]\nblue = m\nout = Solution().f(1,n,m)\n\nprint(\"%.5f\" %out)" }, { "alpha_fraction": 0.2738095223903656, "alphanum_fraction": 0.3849206268787384, "avg_line_length": 20.08333396911621, "blob_id": "a9ea35d9ee20ae998a17bcae8159e289b2795df4", "content_id": "4fc196fd3dd04febd02743620cee851737506413", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 252, "license_type": "no_license", "max_line_length": 38, "num_lines": 12, "path": "/面试与笔试/笔试/tt/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "n = int(input().strip())\nif n == 0:\n print(0)\nelse:\n f = [0] * (n + 2)\n f[0] = 1\n f[2] = 1\n # f[4] = 2\n for i in range(4, n+1, 2):\n for j in range(0, i-2 + 1, 2):\n f[i] += f[j] * f[i-2-j]\n print(f[n] % 1000000007)" }, { "alpha_fraction": 0.5209261178970337, "alphanum_fraction": 0.5271593928337097, "avg_line_length": 23.933332443237305, "blob_id": "bffa771c8d0d6781bece33c674eb20b17b705474", "content_id": "5da731882ac3a1bb3e4e1323a74797106e2f9724", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1325, "license_type": "no_license", "max_line_length": 58, "num_lines": 45, "path": "/offer/6.从尾到头打印链表.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n输入一个链表,按链表值从尾到头的顺序返回一个ArrayList。\n\"\"\"\n# -*- coding:utf-8 -*-\n# class ListNode:\n# def __init__(self, x):\n# self.val = x\n# self.next = None\n\nclass Solution:\n # 返回从尾部到头部的列表值序列,例如[1,2,3]\n def printListFromTailToHead1(self, listNode):\n # 法一:不该变链表结构,使用栈\n stack = []\n p = listNode\n while p:\n stack.append(p.val)\n p = p.next\n return stack[::-1]\n # stack.reverse()\n # return stack\n\n def 
printListFromTailToHead2(self, listNode):\n # 法二:不改变链表结构,使用递归,递归本质上是一个栈结构\n res = []\n if not listNode:\n return res\n tail = self.printListFromTailToHead(listNode.next)\n res += tail\n res.append(listNode.val)\n return res\n\n def printListFromTailToHead(self, listNode):\n # 法三:改变链表结构,把链表进行reverse,使用迭代\n pre = None\n while listNode:\n next = listNode.next\n listNode.next = pre\n pre = listNode\n listNode = next\n res = []\n while pre:\n res.append(pre.val)\n pre = pre.next\n return res\n\n" }, { "alpha_fraction": 0.6305969953536987, "alphanum_fraction": 0.638059675693512, "avg_line_length": 28.77777862548828, "blob_id": "ba170e74911fe2a8a00d158c5e5d33c4ad3dc9cc", "content_id": "4df97e7eeb580e39daa13947ea7f5beffe4e1f2e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 406, "license_type": "no_license", "max_line_length": 118, "num_lines": 9, "path": "/面试与笔试/counter函数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "from collections import Counter\n\na = ['主营业务:', 'O2O', '车主服务', '驾考培训', '互联网驾考', '汽车交通支撑服务', '汽车交通支撑服务', '汽车交通支撑服务', '汽车交通支撑服务', '驾考培训', '驾考培训', '主营业务:']\n# 统计词频\nresult = Counter(a)\nprint(result)\n# 排序\nd = sorted(result.items(), key=lambda x: x[1], reverse=True)\nprint(d)\n" }, { "alpha_fraction": 0.523409366607666, "alphanum_fraction": 0.5540215969085693, "avg_line_length": 38.69047546386719, "blob_id": "50f06db74fec7b6dbb5f925b73bafde412968103", "content_id": "968ec71651af3ef35d36876f9f9fc1ba5258c230", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1896, "license_type": "no_license", "max_line_length": 85, "num_lines": 42, "path": "/回溯法查找路径ljh.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class Solution:\n def path(self, matrix, rows, cols, row, col):\n visited = [[False for i in range(rows)] for j in range(cols)] # 记录是否已经路过了\n res = [] # 保存结果,在python里面将res作为下面的函数的参数是按引用传递的\n self.path_core(matrix, rows, cols, row, col, visited, res)\n return res\n\n def path_core(self, matrix, rows, cols, row, col, visited, res):\n has = False\n if not (0 <= row < rows) or not (0 <= col < cols): # 判断坐标是否满足要求\n return has\n if matrix[row][col] == 1 and row != 0 and col != 0: # 如果找到值为1的点,则成功了,说明存在路径\n res.append((row, col)) # 把改点放到res中,即终点坐标放到res里面\n return True\n\n if matrix[row][col] != -1 and not visited[row][col]: # 如果可以走,且没走过\n res.append((row, col))\n visited[row][col] = True\n has = self.path_core(matrix, rows, cols, row - 1, col, visited, res) or \\\n self.path_core(matrix, rows, cols, row + 1, col, visited, res) or \\\n self.path_core(matrix, rows, cols, row, col - 1, visited, res) or \\\n self.path_core(matrix, rows, cols, row, col + 1, visited, res)\n if not has: # 如果不存在路径,则把先前的都弹出\n res.pop() # 弹出最后一个元素\n visited[row][col] = True\n return has\n\n\nmatrix = [[1,-1,-1],[0,0,1]]\nrows = len(matrix)\ncols = len(matrix[0])\nprint(Solution().path(matrix, rows, cols, 0, 0))\n\nmatrix = [[1,-1,-1],[0,0,-1],[0,0,1]]\nrows = len(matrix)\ncols = len(matrix[0])\nprint(Solution().path(matrix, rows, cols, 0, 0))\n\nmatrix = [[1,-1,-1,0],[0,0,-1,-1],[0,0,0,0],[0,0,-1,1]]\nrows = len(matrix)\ncols = len(matrix[0])\nprint(Solution().path(matrix, rows, cols, 0, 0))" }, { "alpha_fraction": 0.5155038833618164, "alphanum_fraction": 0.5213178396224976, "avg_line_length": 23.619047164916992, "blob_id": "03685a19675635a703e4ad20696db4009b559b61", "content_id": "96eb7ec17d6f2a03d5eb9bed35007a19f832226a", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 548, "license_type": "no_license", "max_line_length": 53, "num_lines": 21, "path": "/面试与笔试/copyListWithRandomPointer.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# Leetcode 138:对一个有随机指针的链表进行深拷贝\nclass RandomListNode:\n def __init__(self, x):\n self.val = x\n self.next = None\n self.random = None\n\nclass Solution:\n def copyRandomList(self, head):\n p = q = head\n node_dict = {}\n while p:\n node_dict[p] = RandomListNode(p.val)\n p = p.next\n\n while q:\n node_dict[q].next = node_dict[q.next]\n node_dict[q].random = node_dict[q.random]\n q = q.next\n\n return node_dict[head]" }, { "alpha_fraction": 0.5736526846885681, "alphanum_fraction": 0.5832335352897644, "avg_line_length": 41.82051467895508, "blob_id": "0a9ae0685e9ed429a2a053c63fdd0ff5cf572de0", "content_id": "1d0d96cffe1aaa1e6656eed54eade83fc45678b1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2058, "license_type": "no_license", "max_line_length": 112, "num_lines": 39, "path": "/offer/12.矩阵中的路径.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n请设计一个函数,用来判断在一个矩阵中是否存在一条包含某字符串所有字符的路径。路径可以从矩阵中的任意一个格子开始,\n每一步可以在矩阵中向左,向右,向上,向下移动一个格子。如果一条路径经过了矩阵中的某一个格子,则之后不能再次进入这个\n格子。 例如 a b c e s f c s a d e e 这样的3 X 4 矩阵中包含一条字符串\"bcced\"的路径,但是矩阵中不包含\"abcb\"路径,因为\n字符串的第一个字符b占据了矩阵中的第一行第二个格子之后,路径不能再次进入该格子。\n\"\"\"\nclass Solution:\n def hasPath(self, matrix, rows, cols, path):\n # 回溯法\n if not matrix or rows < 1 or cols < 1 or not path:\n return False\n visited = [False] * (rows * cols)\n pathLength = 0\n for i in range(rows):\n for j in range(cols):\n if self.hasPathCore(matrix, rows, cols, i, j, path, pathLength, visited):\n return True\n return False\n\n def hasPathCore(self, matrix, rows, cols, row, col, path, pathLength, visited):\n if pathLength == len(path):\n return True\n has = False\n if 0 <= row < rows and 0 <= col < cols and matrix[row * cols + col] == path[pathLength] and not visited[\n row * cols + col]:\n pathLength += 1\n visited[row * cols + col] = True\n has = self.hasPathCore(matrix, rows, cols, row, col - 1, path, pathLength, visited) or \\\n self.hasPathCore(matrix, rows, cols, row, col + 1, path, pathLength, visited) or \\\n self.hasPathCore(matrix, rows, cols, row - 1, col, path, pathLength, visited) or \\\n self.hasPathCore(matrix, rows, cols, row + 1, col, path, pathLength, visited)\n if not has:\n pathLength -= 1\n visited[row * cols + col] = False\n return has\n\nprint(Solution().hasPath(\"ABCESFCSADEE\",3,4,\"ABCCED\"))\n" }, { "alpha_fraction": 0.4769703149795532, "alphanum_fraction": 0.5056294798851013, "avg_line_length": 23.450000762939453, "blob_id": "5e6b742f255d19fbae38f6862c526629a0350d3d", "content_id": "40b5b113910478ef9180e7d180ce477e637de5d1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 995, "license_type": "no_license", "max_line_length": 55, "num_lines": 40, "path": "/面试与笔试/最长无重复子串长度.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "def get_max_substring(s):\n max_length = 0\n start = 0\n d = {}\n for i in range(len(s)):\n if s[i] in d.keys() and start <= d[s[i]]:\n start = d[s[i]] + 1\n else:\n max_length = max(max_length, i - start + 1)\n d[s[i]] = i\n return max_length\n\n\nprint(get_max_substring(\"pwwkew\"))\nprint(get_max_substring(\"abcabcbb\"))\nprint(get_max_substring(\"bbbbb\"))\n\n\n# 
https://www.jianshu.com/p/b28aa7ea8e5f\n\n# 还可以使用滑窗来做\n# def get_max_substring1(s):\n# d = {}\n# max_length = 0\n# i = 0\n# j = 0\n# while i < len(s) and j < len(s):\n# if s[j] in d.keys() and d[s[j]] != -1:\n# d[s[i]] = -1\n# i += 1\n# else:\n# d[s[j]] = 1\n# j += 1\n# max_length = max(max_length, j-i)\n# return max_length\n#\n# print(get_max_substring1(\"pwwkew\"))\n# print(get_max_substring1(\"abcabcbb\"))\n# print(get_max_substring1(\"bbbbb\"))\n# https://www.cnblogs.com/kkkky/p/7687083.html" }, { "alpha_fraction": 0.4670846462249756, "alphanum_fraction": 0.48432600498199463, "avg_line_length": 24.559999465942383, "blob_id": "f4000864101cf5a7b528c06fb79433f3d141ef3e", "content_id": "8347c474941d4635c8e377bafee84f036b6b2bc4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 648, "license_type": "no_license", "max_line_length": 48, "num_lines": 25, "path": "/面试与笔试/笔试/pingan/11.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nclass Solution:\n # 二分搜索法\n def binary_search(self, a, b):\n low = 0\n high = a\n while high - low > 1e-7:\n mid = (low + high) / 2\n if abs(pow(mid, b) - a) < 1e-7:\n return mid\n elif pow(mid, b) > a:\n high = mid\n else:\n low = mid\n return mid\n\n# line = sys.stdin.readline().strip().split(\" \")\n# a = float(line[0])\n# b = int(line[1])\n# print(Solution().binary_search(a, b))\nline = sys.stdin.readline().strip()\narr = list(map(int, line.split()))\na = arr[0]\nb = arr[1]\nprint('%.6f' %Solution().binary_search(a, b))" }, { "alpha_fraction": 0.46666666865348816, "alphanum_fraction": 0.5041666626930237, "avg_line_length": 25.703702926635742, "blob_id": "45b407f87330e7e9972aff6ce4435339dc5f785c", "content_id": "c14fbad34c2a978ecc4514070a3133ebe0f46ba8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 720, "license_type": "no_license", "max_line_length": 50, "num_lines": 27, "path": "/offer/53.2 数组数值和下标相等的元素.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "'''\n\n'''\n\nclass Solution:\n def find_first_same(self, data):\n if not data:\n return -1\n left = 0\n right = len(data) - 1\n while left <= right:\n mid = (left + right) // 2\n if data[mid] == mid:\n return mid\n elif data[mid] > mid:\n right = mid - 1\n else:\n left = mid + 1\n return -1\n\nprint(Solution().find_first_same([]))\nprint(Solution().find_first_same([0]))\nprint(Solution().find_first_same([1]))\nprint(Solution().find_first_same([-3,-2,0,1,4,5]))\nprint(Solution().find_first_same([0,2,4,5]))\nprint(Solution().find_first_same([-4,-2,2]))\nprint(Solution().find_first_same([-4,-2,-3,0,1]))" }, { "alpha_fraction": 0.5632295608520508, "alphanum_fraction": 0.575875461101532, "avg_line_length": 20.851064682006836, "blob_id": "db004021ca72b7b64ac985e9c3336fb53d0b3a14", "content_id": "7572cc7664edb12975760f52f4c09c4e813017f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1266, "license_type": "no_license", "max_line_length": 72, "num_lines": 47, "path": "/offer/59.1队列的最大值.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n定义一个队列并实现函数max得到队列的最大值,要求函数max,push_back,pop_front的时间复杂度都是O(1)\n\"\"\"\n\n\"\"\"\n思路:队列的最大值,使用另外一个队列来存储最大值,当push一个值的时候,如果该值小于队尾的值时,直接放到队尾,如果大于队尾的值时,则将队尾连续\n出队列,直到不大于,然后当前值入队\n\"\"\"\nclass Queue:\n def __init__(self):\n self.queue = []\n self.max_queue = []\n\n def max(self):\n if self.max_queue:\n 
return self.max_queue[0]\n else:\n return None\n\n def push_back(self, x):\n self.queue.append(x)\n while self.max_queue and x > self.max_queue[-1]:\n self.max_queue.pop()\n self.max_queue.append(x)\n\n def pop_front(self):\n if not self.queue:\n return None\n res = self.queue[0]\n if self.max_queue[0] == res:\n self.max_queue = self.max_queue[1:]\n self.queue = self.queue[1:]\n return res\n\ns = Queue()\nprint(s.pop_front())\ns.push_back(3)\nprint(s.max())\nprint(s.pop_front())\ns.push_back(5)\ns.push_back(2)\ns.push_back(4)\ns.push_back(1)\nprint(s.max())\nprint(s.pop_front())\nprint(s.max())\n\n" }, { "alpha_fraction": 0.5146496891975403, "alphanum_fraction": 0.5210191011428833, "avg_line_length": 23.5625, "blob_id": "ccab9d58e8e1a94bc8a4253ab133dc4049af5909", "content_id": "b92959291d442fa45db96af0a548a5c0f91e97fe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 971, "license_type": "no_license", "max_line_length": 87, "num_lines": 32, "path": "/offer/38.字符串的排列.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n输入一个字符串,按字典序打印出该字符串中字符的所有排列。例如输入字符串abc,则打印出由字符a,b,c所能排列出来的所有字符串abc,acb,bac,bca,cab和cba。\n输入描述:\n输入一个字符串,长度不超过9(可能有字符重复),字符只包括大小写字母。\n\"\"\"\n\n\nclass Solution:\n def Permutation(self, ss):\n # write code here\n res = []\n if ss == '':\n return res\n\n ss = list(ss)\n self._Permutation(ss, 0, res)\n\n return sorted(res)\n\n def _Permutation(self, ss, begin, res):\n if begin == len(ss) - 1:\n res.append(''.join(ss))\n return\n\n for i in range(begin, len(ss)):\n if ss[begin] == ss[i] and begin != i:\n continue\n ss[begin], ss[i] = ss[i], ss[begin]\n self._Permutation(ss, begin + 1, res)\n ss[begin], ss[i] = ss[i], ss[begin]" }, { "alpha_fraction": 0.5348993539810181, "alphanum_fraction": 0.5483221411705017, "avg_line_length": 23.850000381469727, "blob_id": "c91d0874400c233918247c80ccbeb0911a05ba56", "content_id": "689675d58681abc59d75bc426a50ec7f93c9ca71", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1490, "license_type": "no_license", "max_line_length": 98, "num_lines": 60, "path": "/二叉树的遍历/中序遍历.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class BiTree:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\nclass SolutionRecursion:\n # def inorder(self, root):\n # res = []\n # if not root:\n # return res\n # self.traversal(root, res)\n # return res\n #\n # def traversal(self, root, res):\n # if not root:\n # return\n # self.traversal(root.left, res)\n # res.append(root.val)\n # self.traversal(root.right, res)\n\n def inorder_traversal(self, root):\n if not root:\n return []\n return self.inorder_traversal(root.left) + [root.val] + self.inorder_traversal(root.right)\n\nclass SolutionCirculation:\n def inorder_traversal(self, root):\n res = []\n if not root:\n return res\n stack = []\n p = root\n while p or len(stack):\n if p:\n stack.append(p)\n p = p.left\n else:\n p = stack.pop()\n res.append(p.val)\n p = p.right\n return res\n\n\n\nroot = BiTree(45)\na = BiTree(12)\nb = BiTree(53)\nroot.left = a\nroot.right = b\na.left = BiTree(3)\na.right = BiTree(37)\na.right.left = BiTree(24)\nb.right = BiTree(100)\nb.right.left = BiTree(61)\nb.right.left.right = BiTree(90)\nb.right.left.right.left = BiTree(78)\nprint(SolutionRecursion().inorder_traversal(root))\n# 
print(SolutionRecursion().inorder(root))\nprint(SolutionCirculation().inorder_traversal(root))" }, { "alpha_fraction": 0.4267100989818573, "alphanum_fraction": 0.4788273572921753, "avg_line_length": 23.864864349365234, "blob_id": "1aefa589859e400f45cddabe6d1264cbc0760f38", "content_id": "5bd1a7233d151d3d55f63902d2ab6dd5e7e93c6c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 921, "license_type": "no_license", "max_line_length": 47, "num_lines": 37, "path": "/排序/归并排序.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "#-\nclass Solution:\n def merge(self, left, right):\n i, j = 0, 0\n res = []\n while i < len(left) and j < len(right):\n if left[i] <= right[j]:\n res.append(left[i])\n i += 1\n else:\n res.append(right[j])\n j += 1\n res += left[i:] or right[j:]\n return res\n\n def sort(self, num):\n if not num or len(num) <= 1:\n return num\n mid = len(num) >> 1\n left = self.sort(num[:mid])\n right = self.sort(num[mid:])\n res = self.merge(left, right)\n return res\n\nprint(Solution().sort(None))\nprint(Solution().sort([]))\nprint(Solution().sort([1]))\nprint(Solution().sort([1,2,3,4]))\nprint(Solution().sort([4,3,2,1,0]))\nprint(Solution().sort([49,38,65,97,76,13,27]))\nprint(Solution().sort([49,38,65,97,76,13]))\n\na = [1,2]\ndef d(a):\n a[0], a[1] = a[1], a[0]\nd(a)\nprint(a)\n\n" }, { "alpha_fraction": 0.3861566483974457, "alphanum_fraction": 0.4790528118610382, "avg_line_length": 29.5, "blob_id": "a84a4f76ded6f97d120fa33242d321030fe78211", "content_id": "f2c0db41bca2741ba126f76afae05db72001ddf7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 549, "license_type": "no_license", "max_line_length": 69, "num_lines": 18, "path": "/查找/二分(折半)查找.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class Solution:\n def binary_search(self, num, target):\n if not num:\n return False\n low = 0\n high = len(num) - 1\n while low <= high:\n mid = (low + high) >> 1\n if num[mid] == target:\n return mid\n elif num[mid] < target:\n low = mid + 1\n else:\n high = mid - 1\n return False\n\nprint(Solution().binary_search([5,13,19,21,37,56,64,75,80,88,92],21))\nprint(Solution().binary_search([5,13,19,21,37,56,64,75,80,88,92],85))\n" }, { "alpha_fraction": 0.30760499835014343, "alphanum_fraction": 0.3950056731700897, "avg_line_length": 31.66666603088379, "blob_id": "e00f6ee3317850f3b07bd1d9cfd300200479a1b9", "content_id": "819cdd8133a6ea3c0911c00cfd587e5c2199061e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 881, "license_type": "no_license", "max_line_length": 140, "num_lines": 27, "path": "/面试与笔试/笔试/nvidia/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\ns = []\nfor i in range(4):\n s.append(input())\n\nDAY = {'A':'MON', 'B':'TUE', 'C':'WED', 'D':'THU', 'E':'FRI', 'F':'SAT', 'G':'SUN'}\nhour = {'0':'00', '1':'01', '2':'02','3':'03', '4':'04', '5':'05','6':'06', '7':'07', '8':'08',\n '9':'09', 'A':'10', 'B':'11','C':'12', 'D':'13', 'E':'14','F':'15', 'G':'16', 'H':'17',\n 'I':'18', 'J':'19', 'K':'20','L':'21', 'M':'22', 'N':'23'}\n\ncount = 0\none = \"\"\nwhile True:\n if s[0][count] == s[1][count]:\n if one != \"\":\n hh = s[0][count]\n break\n elif s[0][count] >= 'A' and s[0][count] <= 'G':\n one = s[0][count]\n count += 1\ncount = 0\nwhile True:\n if s[2][count] == s[3][count] and ('A'<=s[2][count]<='Z' or 'a'<=s[2][count]<='z') and 
('A'<=s[3][count]<='Z' or 'a'<=s[3][count]<='z'):\n break\n count += 1\n\nprint(\"%s %s:%02d\" %(DAY[one], hour[hh], count))" }, { "alpha_fraction": 0.5654450058937073, "alphanum_fraction": 0.6073298454284668, "avg_line_length": 26.285715103149414, "blob_id": "294b7d5ce102a72030141039669e2e4bc5987ec5", "content_id": "898744ea846b344d0c061045365602e214815fb7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 235, "license_type": "no_license", "max_line_length": 62, "num_lines": 7, "path": "/查找/深度-广度优先搜索.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "def dfs(node, v):\n visited[v] = True\n for w in neighbors:\n if not visited[w]:\n dfs(node, w)\n# 参考矩阵总1的块数\n# 参考:https://www.cnblogs.com/icekx/p/9152452.html 但是上面的也不是很标准\n" }, { "alpha_fraction": 0.47193586826324463, "alphanum_fraction": 0.4845360815525055, "avg_line_length": 33.91999816894531, "blob_id": "99d4796f5de9a8c99cde2d72308a369c102e3323", "content_id": "024e14d1dfe4648a28e44ddd4804d4ceecfe7a1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 873, "license_type": "no_license", "max_line_length": 66, "num_lines": 25, "path": "/面试与笔试/笔试/hw/3333.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nT = int(input())\n\nfor i in range(T):\n res = 0\n max_score = 0\n n = input()\n er = list(map(int, sys.stdin.readline().strip().split(\" \")))\n score = [0]\n left = []\n mid = [i for i in er[:2] if i == er[1]]\n right = []\n score.append(score[-1] + len(left)-len(right))\n for item in range(1, len(er)):\n if er[item] > er[item-1]:\n left += mid\n mid = [i for i in right if i == er[item]] + [er[item]]\n left += [i for i in right if i < er[item]]\n right += [i for i in right if i > er[item]]\n if er[item] < er[item-1]:\n right += (mid + [i for i in left if i > er[item]])\n mid = [i for i in left if i == er[item]] + [er[item]]\n left += [i for i in left if i < er[item]]\n score.append(score[-1]+len(left)-len(right))\n print(max(score), score[-1])\n" }, { "alpha_fraction": 0.27444368600845337, "alphanum_fraction": 0.3971678912639618, "avg_line_length": 24.152542114257812, "blob_id": "9f474d58ff3daf2420fd58a06f21c0d5fe4219d2", "content_id": "f2f15988d15e135e0ae36a90313c8b1a0f586148", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1609, "license_type": "no_license", "max_line_length": 73, "num_lines": 59, "path": "/面试与笔试/求线段集的交集.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\nA = [[0,2],[5,10],[13,23],[24,25]]\nB = [[1,5],[8,12],[15,24],[25,26]]\n\n[[1,2],[5,5],[8,10],[15,23],[24,24],[25,25]]\n\"\"\"\n# 先排序再遍历,O(mn),可以优化成O(m+n)\ndef f(A, B):\n A.sort(key=lambda x:x[0])\n B.sort(key=lambda x:x[0])\n res = []\n for i in range(len(A)):\n for j in range(len(B)):\n if A[i][1] < B[j][0]:\n break\n elif A[i][0] <= B[j][1] and A[i][1] >= B[j][0]:\n res.append([max(A[i][0],B[j][0]), min(A[i][1], B[j][1])])\n else:\n continue\n return res\n\n# O(M+N),每一次不用从B数组的头部开始遍历,而是继续上一次遍历的位置\ndef f2(A, B):\n A.sort(key=lambda x:x[0])\n B.sort(key=lambda x:x[0])\n res = []\n i = 0\n j = 0\n while i < len(A):\n last = 0\n while j < len(B):\n if A[i][1] < B[j][0]:\n break\n elif A[i][0] <= B[j][1] and A[i][1] >= B[j][0]:\n res.append([max(A[i][0],B[j][0]), min(A[i][1], B[j][1])])\n last = j # 要从上一次遍历的最后一个元素开始遍历\n j += 1\n else:\n j += 1\n continue\n j = last\n i += 1\n return res\nA = 
[[0,2],[5,10],[13,23],[24,25]]\nB = [[1,5],[8,12],[15,24],[25,26]]\nprint(f(A,B))\n\nA = [[0,2],[5,10],[13,23],[24,25]]\nB = [[1,5],[8,12],[15,24],[25,26]]\nprint(f2(A,B))\n\nA = [[24,25],[0,2],[13,23],[5,10]]\nB = [[8,12],[15,24],[1,5],[25,26]]\nprint(f(A,B))\n\nA = [[0,2],[5,10],[13,25],[26,27]]\nB = [[1,5],[8,12],[15,24],[25,26]]\nprint(f(A,B))\nprint(f2(A,B))" }, { "alpha_fraction": 0.46801871061325073, "alphanum_fraction": 0.4988299608230591, "avg_line_length": 34.123287200927734, "blob_id": "cc899bb4f5e4d6449e358ff3b0865776bbb70107", "content_id": "0c54d83aebef714a55c21fd195e31f536426bbb5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2612, "license_type": "no_license", "max_line_length": 95, "num_lines": 73, "path": "/面试与笔试/数组中前k个最小的数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# 法1:使用快排\nclass Solution:\n def partition(self, nums, low, high):\n pivot = nums[low]\n while low < high:\n while low < high and nums[high] >= pivot:\n high -= 1\n nums[low] = nums[high]\n while low < high and nums[low] <= pivot:\n low += 1\n nums[high] = nums[low]\n nums[low] = pivot\n return low\n\n # def quick_sort_for_topk(self, nums, k, low, high):\n # if low < high:\n # pivot = self.partition(nums, low, high)\n # if pivot + 1 == k:\n # return nums[0:pivot+1]\n # elif pivot + 1 > k:\n # self.quick_sort_for_topk(nums[low:pivot], k, low, pivot-1)\n # else:\n # self.quick_sort_for_topk(nums[pivot+1:high+1], k-pivot-1, pivot+1, high)\n def quick_sort_for_topk(self, nums, k, low, high):\n pivot = self.partition(nums, low, high)\n if pivot + 1 == k:\n return nums[0:pivot + 1]\n elif pivot + 1 > k:\n return self.quick_sort_for_topk(nums, k, low, pivot-1)\n else:\n return self.quick_sort_for_topk(nums, k, pivot+1, high)\n\n def GetLeastNumbers_Solution(self, nums, k):\n if not nums or len(nums) < 1 or k > len(nums) or k <= 0:\n return []\n return sorted(self.quick_sort_for_topk(nums, k, 0, len(nums)-1)) # nowcoder上面要求输出的也是有序的\n\n# 使用堆,不对\nclass Solution2:\n def adjust_down(self, nums, k, size):\n root = nums[k]\n i = 2 * k + 1\n while i < (size):\n if i + 1 < size and nums[i] < nums[i+1]:\n i += 1\n if root >= nums[i]:\n break\n else:\n nums[k] = nums[i]\n k = i\n i = i * 2 + 1\n nums[k] = root\n\n def build_max_heap(self, nums, k):\n size = (k-1) // 2\n for i in range(size, -1, -1):\n self.adjust_down(nums, i, k)\n\n def GetLeastNumbers_Solution(self, nums, k):\n if k == len(nums): return nums\n if k > len(nums) or k <= 0: return []\n self.build_max_heap(nums[0:k], k)\n for i in range(k, len(nums)):\n if nums[i] < nums[0]:\n nums[0] = nums[i]\n self.adjust_down(nums[0:k], 0, k)\n return nums[0:k]\n\n\n# print(Solution().GetLeastNumbers_Solution([4,5,1,6,2,7,3,8], 4))\nprint(Solution2().GetLeastNumbers_Solution([4,5,1,6,2,7,3,8], 4))\nprint(Solution().GetLeastNumbers_Solution([4,5,1,6,2,7,3,8], 6))\nprint(Solution().GetLeastNumbers_Solution([4,5,1,6,2,7,3,8],10))\n" }, { "alpha_fraction": 0.4428044259548187, "alphanum_fraction": 0.45018449425697327, "avg_line_length": 18.071428298950195, "blob_id": "823341c69d1512d2b248d0de6df994655476133d", "content_id": "0f642f63f55f7ceb1bc7080400d2b1535cc70695", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 271, "license_type": "no_license", "max_line_length": 52, "num_lines": 14, "path": "/面试与笔试/笔试/hw/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nline = 
list(sys.stdin.readline().strip().split(\" \"))\nx = list(line[0])\ny = list(line[1])\n\nn = len(y)\nfor i in range(len(x)):\n if i + n > len(x):\n print(\"\".join(x))\n break\n else:\n if x[i:i+n] == y:\n x[i:i+n] = [\"*\"]*n\n\n\n\n\n" }, { "alpha_fraction": 0.375, "alphanum_fraction": 0.5, "avg_line_length": 16, "blob_id": "9b5f8c2fd30d1c1de4190a9596dfcb05c23bbd47", "content_id": "60d84e61e282880ce2c3bd3383e29bd6494aed4c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16, "license_type": "no_license", "max_line_length": 16, "num_lines": 1, "path": "/京东笔试/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "print('%.4f' %3)" }, { "alpha_fraction": 0.3971354067325592, "alphanum_fraction": 0.4049479067325592, "avg_line_length": 20.885713577270508, "blob_id": "afb3b3cdf3abf4048037bab3085ae1752d40539f", "content_id": "37b7e256f675446f3d8eea911d89c41c97997d02", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 768, "license_type": "no_license", "max_line_length": 33, "num_lines": 35, "path": "/京东笔试/test1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n# class TreeNode:\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\ndef count(d, key, n):\n if key not in d.keys():\n return 0\n for val in d[key]:\n n += count(d, val, n)\n\nif __name__ == '__main__':\n n = int(input())\n node = []\n d = {}\n for i in range(n-1):\n x, y = input().split(' ')\n x, y = int(x), int(y)\n if x > y:\n key = y\n value = x\n else:\n key, value = x, y\n if key not in d.keys():\n d[key] = [value]\n else:\n d[key].append(value)\n\n # # print(d)\n # num = len(d[1])\n # for val in d[1]:\n # count(d, val, num)\n # for val in d[1]:\n print(len(d)+1)\n\n\n" }, { "alpha_fraction": 0.4602316617965698, "alphanum_fraction": 0.4911196827888489, "avg_line_length": 24.39215660095215, "blob_id": "5f3d9b92e0c2ec17e7b7bc22c43956042d622a3c", "content_id": "01368c27287e91981e49ee228bff1f96672df31a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1499, "license_type": "no_license", "max_line_length": 59, "num_lines": 51, "path": "/offer/56.数组中只出现一次的(两个)数字.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n'''\n题目描述\n一个整型数组里除了两个数字之外,其他的数字都出现了两次。请写程序找出这两个只出现一次的数字。\n'''\n\nclass Solution:\n # 返回[a,b] 其中ab是出现一次的两个数字\n def FindNumsAppearOnce(self, array):\n # write code here\n if not array or len(array) < 2:\n return None\n xor_sum = 0\n for item in array:\n xor_sum ^= item\n\n # 找到一位为1的,下面这种方法是错误的,这里是十进制表示的!!\n # index = 0\n # for i in range(len(str(xor_sum)) - 1, -1, -1):\n # if str(xor_sum)[i] == '1':\n # index = i - len(str(xor_sum))\n # break\n # if index == 0:\n # return None\n\n # 找到一位为1的\n index = self.find_first_1bit(xor_sum)\n\n # 分成两组\n group1 = 0\n group2 = 0\n for i in range(len(array)):\n if self.is_bit1(array[i], index):\n group1 ^= array[i]\n else:\n group2 ^= array[i]\n return group1, group2\n\n def find_first_1bit(self, xor_sum):\n index = 0\n while xor_sum:\n xor_sum = xor_sum >> 1\n index += 1\n return index\n\n def is_bit1(self, num, index):\n num = num >> (index - 1)\n return num & 1\n\nprint(Solution().FindNumsAppearOnce([1,2]))\nprint(Solution().FindNumsAppearOnce([1,1,2,2,3,4,4,5,6,6]))\n" }, { "alpha_fraction": 0.4174816608428955, "alphanum_fraction": 0.44254279136657715, "avg_line_length": 
22.042253494262695, "blob_id": "ebc3b0ae0ce47538cf3b8715e9db92fde0b10a1b", "content_id": "8b0c8142040d6ce070b5fa08770640ee6f466d21", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3272, "license_type": "no_license", "max_line_length": 65, "num_lines": 142, "path": "/面试与笔试/笔试/tx/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# import sys\n# line1 = list(map(int, sys.stdin.readline().strip().split(' ')))\n# n = line1[0]\n# m = line1[1]\n#\n# w = list(map(int, sys.stdin.readline().strip().split(' ')))\n# v = list(map(int, sys.stdin.readline().strip().split(' ')))\n#\n# i = min(w) + 1\n# while i:\n# money = m\n# for j in range(len(w)):\n# if w[j] < i:\n# money -= (i - w[j])*v[j]\n# if money >= 0:\n# i += 1\n# continue\n# else:\n# print(i - 1)\n# break\n\n\n# import sys\n# line1 = list(map(int, sys.stdin.readline().strip().split(' ')))\n# n = line1[0]\n# m = line1[1]\n#\n# w = list(map(int, sys.stdin.readline().strip().split(' ')))\n# v = list(map(int, sys.stdin.readline().strip().split(' ')))\n#\n# i = min(w) + 1\n# while i:\n# for j in range(len(w)):\n# if w[j] < i:\n# m -= (i - w[j])*v[j]\n# w[j] += 1\n# if m >= 0:\n# i += 1\n# continue\n# else:\n# print(i - 1)\n# break\n\n# import sys\n# line1 = list(map(int, sys.stdin.readline().strip().split(' ')))\n# n = line1[0]\n# m = line1[1]\n#\n# w = list(map(int, sys.stdin.readline().strip().split(' ')))\n# v = list(map(int, sys.stdin.readline().strip().split(' ')))\n#\n# map_ = [list(item) for item in zip(w, v)]\n# map_.sort()\n# j = min(w) + 1\n# # while j:\n# # for i in range(len(map_)):\n# # if map_[i][0] < j:\n# # m -= (j - map_[i][0]) * map_[i][1]\n# # map_[i][0] += 1\n# # else:\n# # break\n# # if m >= 0:\n# # j += 1\n# # continue\n# # else:\n# # print(j - 1)\n# # break\n# while m >= 0:\n# for i in range(len(map_)):\n# m = m - map_[i][1]\n# map_[i][0] += 1\n# if i < len(map_)-1 and map_[i+1][0] < map_[i][0]:\n# continue\n# else:\n# break\n# print(map_[0][0]-1)\n\n\n\n# import sys\n# line1 = list(map(int, sys.stdin.readline().strip().split(' ')))\n# n = line1[0]\n# m = line1[1]\n#\n# w = list(map(int, sys.stdin.readline().strip().split(' ')))\n# v = list(map(int, sys.stdin.readline().strip().split(' ')))\n#\n# map_ = [list(item) for item in zip(w, v)]\n# map_.sort()\n# j = min(w) + 1\n#\n# while m >= 0:\n# for i in range(len(map_)):\n# m = m - map_[i][1]\n# map_[i][0] += 1\n# if i < len(map_)-1 and map_[i+1][0] < map_[i][0]:\n# continue\n# else:\n# break\n# print(map_[0][0]-1)\n\n\nimport sys\nline1 = list(map(int, sys.stdin.readline().strip().split(' ')))\nn = line1[0]\nm = line1[1]\n\nw = list(map(int, sys.stdin.readline().strip().split(' ')))\nv = list(map(int, sys.stdin.readline().strip().split(' ')))\n\nthe_max = max(w)\nthe_min = min(w)\n\n\n\nfor i in range(n):\n gap = 0\n mid = (the_max + the_min) // 2\n if w[i] < mid:\n gap += (mid - w[i]) * v[i]\n if gap == m:\n ans = mid\n break\n elif gap < m:\n the_min = mid + 1\n ans = mid\n elif gap > m:\n the_max = mid - 1\nprint(ans)\n# map_ = [list(item) for item in zip(w, v)]\n# map_.sort()\n# j = min(w) + 1\n#\n# while m >= 0:\n# for i in range(len(map_)):\n# m = m - map_[i][1]\n# map_[i][0] += 1\n# if i < len(map_)-1 and map_[i+1][0] < map_[i][0]:\n# continue\n# else:\n# break\n# print(map_[0][0]-1)\n" }, { "alpha_fraction": 0.4574821889400482, "alphanum_fraction": 0.5263658165931702, "avg_line_length": 31.890625, "blob_id": "d399ae0d769cbcc74a3cae134a0434e85a2a5849", "content_id": 
"466a37264df5c9207bb05b574d509e7b75ec2959", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2131, "license_type": "no_license", "max_line_length": 64, "num_lines": 64, "path": "/查找/查找数组中的第k个元素.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class Solution:\n def partition(self, nums, low, high):\n pivot = nums[low]\n while low < high:\n while low < high and nums[high] >= pivot:\n high -= 1\n nums[low] = nums[high]\n while low < high and nums[low] <= pivot:\n low += 1\n nums[high] = nums[low]\n nums[low] = pivot\n return low\n\n def find(self, nums, low, high, k):\n pivot = self.partition(nums, low, high)\n if pivot + 1 == k:\n return nums[pivot]\n elif pivot + 1 > k:\n return self.find(nums, low, pivot-1, k)\n else:\n return self.find(nums, pivot+1, high, k)\n\n def findKth(self, nums, k):\n if k < 0 or not nums:\n return False\n return self.find(nums, 0, len(nums)-1, k)\n\n # 迭代法\n def findKthCirculation(self, nums, k):\n if not nums or k < 0 or k > len(nums):\n return\n start = 0\n end = len(nums)-1\n index = self.partition(nums, start, end)\n while index != k - 1:\n if index > k - 1:\n end = index - 1\n index = self.partition(nums, start, end)\n else:\n start = index + 1\n index = self.partition(nums, start, end)\n # 下面三行求最小的k个数\n # res = []\n # for i in range(0, k):\n # res.append(nums[i])\n\n res = nums[index]\n return res\n\n\n\n\nprint(Solution().findKth([1,2,3,2,2,5],4))\nprint(Solution().findKthCirculation([1,2,3,2,2,5],4))\nprint(Solution().findKth([1,1,1,1,1,1,1,1,11,0],4))\nprint(Solution().findKthCirculation([1,1,1,1,1,1,1,1,11,0],4))\nprint(Solution().findKth([1,2,3,4,5,6,0],4))\nprint(Solution().findKthCirculation([1,2,3,4,5,6,0],4))\nprint(Solution().findKth([6,3,5,1,2,4,7,8,9,10],4))\nprint(Solution().findKthCirculation([6,3,5,1,2,4,7,8,9,10],4))\nprint(Solution().findKth([6,3,5,1,2,4,7,8,9,10],7))\nprint(Solution().findKthCirculation([6,3,5,1,2,4,7,8,9,10],7))\nprint(Solution().findKth([6,3,5,1,2,4,7,8,9,10,0],9))\nprint(Solution().findKthCirculation([6,3,5,1,2,4,7,8,9,10,0],9))\n" }, { "alpha_fraction": 0.46074381470680237, "alphanum_fraction": 0.5144628286361694, "avg_line_length": 23.200000762939453, "blob_id": "d7f57384b7482e1c49b65893ccd71f1646a7df52", "content_id": "719ffab522ac8ee6775e48cd6b9366ce0a39e8cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1202, "license_type": "no_license", "max_line_length": 53, "num_lines": 40, "path": "/offer/15.二进制中1的个数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\nclass Solution:\n def NumberOf1_1(self, n):\n # 是一种有问题的解法\n # python中并没有对int类型的位数进行限制\n # 但是对于有符号数,例如为负数的时候,统计会出错,因为负数右移左边添加的是1\n # 例如输入-1,输出为32\n # count = 0\n # for i in range(32):\n # count += (n >> i)&1\n # return count\n return sum([(n >> i & 1) for i in range(32)])\n\n def NumberOf1(self, n):\n # 法二:为了避免上面的问题,采取对1进行移位\n # 这种方法需要循环32位\n count = 0\n for i in range(32):\n if n & (1 << i):\n count += 1\n return count\n\n def NumberOf1_2(self, n):\n # 法三:这种算法n中有几个1则需要循环几次\n # 也报错\n count = 0\n while n:\n count += 1\n n = (n - 1) & n\n return count\n\n\nprint(Solution().NumberOf1(-1))\nprint(Solution().NumberOf1(-127))\n# print(Solution().NumberOf1(10))\n# print(Solution().NumberOf1(8))\n# print(Solution().NumberOf1(15))\n# print(Solution().NumberOf1(1232132))\n\nprint(bin(-1))\n" }, { "alpha_fraction": 0.5567970275878906, "alphanum_fraction": 0.575419008731842, 
"avg_line_length": 29.714284896850586, "blob_id": "879e642e18eb2ca971b507b20f0d980cacf61c77", "content_id": "c6d82aeac632ad57f58c1041b1438a64951e5fb5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1336, "license_type": "no_license", "max_line_length": 65, "num_lines": 35, "path": "/offer/48.最长不含重复字符串的子字符串.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n找给定字符串中的最长不含重复字符串的子字符串\n\"\"\"\n\"\"\"\n思路一:暴力求解,子字符串有N^2个,时间复杂度为O(n^3)\n思路二:动态规划,dp[i]表示以当前字符结尾的子字符串的长度,\n当当前字符没有出现在以i-1结尾的字串中时,dp[i] = dp[i-1] + 1\n当出现在i-1结尾的时,dp[i] = 这两个相同字符之间的长度,需要一个数组保存字符最后出现的位置\n\"\"\"\n\n\nclass Solution:\n def longestSubstringWithoutDuplication(self, s):\n if not s:\n return 0\n ch = [-1] * 26\n dp = [0] * len(s)\n dp[0] = 1\n ch[ord(s[0]) - ord('a')] = 0\n max_len = dp[0]\n for i in range(1, len(s)):\n if i - ch[ord(s[i]) - ord('a')] > dp[i-1]:\n dp[i] = dp[i-1] + 1\n else:\n dp[i] = i - ch[ord(s[i]) - ord('a')]\n ch[ord(s[i]) - ord('a')] = i\n # print(dp[i])\n max_len = max(max_len, dp[i])\n return max_len\n\nprint(Solution().longestSubstringWithoutDuplication(\"arabcacfr\"))\nprint(Solution().longestSubstringWithoutDuplication(\"a\"))\nprint(Solution().longestSubstringWithoutDuplication(\"aaaaaaa\"))\nprint(Solution().longestSubstringWithoutDuplication(\"abcdefg\"))\nprint(Solution().longestSubstringWithoutDuplication(\"\"))" }, { "alpha_fraction": 0.6031372547149658, "alphanum_fraction": 0.610196053981781, "avg_line_length": 24.479999542236328, "blob_id": "639406c9ee69b1902d25d28ca72424a94f216c8b", "content_id": "a51dce886c15baa701c6e618458dd980f757badc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1749, "license_type": "no_license", "max_line_length": 98, "num_lines": 50, "path": "/offer/18.删除链表的节点.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n在O(1)时间内删除链表节点\n给定单项链表的头指针和一个节点指针,定义一个函数在O(1)时间内删除该节点\n\"\"\"\n\nclass ListNode:\n def __init__(self, val):\n self.val = val\n self.next = None\n\nclass Solution:\n def delete_node(self, head, node):\n # 最容易想到的方法就是从头往后遍历,是因为要得到被删除节点的前一个节点,但是时间是O(n)\n # 其实也可以把被删除节点的后一个节点的内容复制到要删除的节点上,然后删除下一个节点,不过\n # 这样就改变了链表的原始结构了(并不算),但是复杂度为O(1)\n # 此时还要考虑一些特殊情况:\n # 1. 删除的是尾节点,此时下一个节点为空,此时就只能从头遍历了\n # 2. 
只有一个节点,且要删除这个节点,即删除头结点/尾节点,删除这个节点且将头结点设为空\n if not head or not node:\n return\n if node.next:\n node.val = node.next.val\n node.next = node.next.next\n elif not head.next and head == node:\n head = None\n\n else:\n p = head\n while p.next != node:\n p = p.next\n p.next = None\n\n return head\n\np = head = ListNode(1)\n# print(head.val, head.next)\np.next = ListNode(2)\n# print(head.val,head.next.val, head.next)\np = p.next\np.next = ListNode(3)\n# print(head.val, head.next.val, head.next.next.val, head.next.next)\np = p.next\np.next = ListNode(4)\np = p.next\n# print(head.val, head.next.val, head.next.next.val, head.next.next.next.val, head.next.next.next)\n\nhead = Solution().delete_node(head, p)\nwhile head:\n print(head.val)\n head = head.next\n\n" }, { "alpha_fraction": 0.46406251192092896, "alphanum_fraction": 0.49609375, "avg_line_length": 18.393939971923828, "blob_id": "96a965ba0807a5b0bd1e034f0201d773c6bbd66d", "content_id": "b250950173df99253586ee4a1711e070455d8a9b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1280, "license_type": "no_license", "max_line_length": 65, "num_lines": 66, "path": "/面试与笔试/笔试/tx/tencent.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# import sys\n# line1 = list(map(int, sys.stdin.readline().strip().split(' ')))\n# n = line1[0]\n# k = line1[1]\n#\n#\n# h = list(map(int, sys.stdin.readline().strip().split(' ')))\n# min_sum = float(\"inf\")\n# i = 0\n# j = k\n# index = 0\n# while j <= len(h):\n# if min_sum > sum(h[i:j]):\n# index = i\n# min_sum = min(min_sum, sum(h[i:j]))\n# i += 1\n# j += 1\n#\n# print(index+1)\n# import sys\n# line1 = list(map(int, sys.stdin.readline().strip().split(' ')))\n# n = line1[0]\n# k = line1[1]\n#\n# h = list(map(int, sys.stdin.readline().strip().split(' ')))\n# min_sum = float(\"inf\")\n# i = 0\n# j = k\n# index = 0\n# sum_1 = sum(h[i:j])\n# i += 1\n# j += 1\n# while j <= len(h):\n# cur_sum = sum_1 - h[i-1] + h[j-1]\n# if min_sum > cur_sum:\n# index = i\n# min_sum = min(min_sum, cur_sum)\n# i += 1\n# j += 1\n#\n# print(index+1)\n\nimport sys\nline1 = list(map(int, sys.stdin.readline().strip().split(' ')))\nn = line1[0]\nk = line1[1]\n\n\nh = list(map(int, sys.stdin.readline().strip().split(' ')))\n# min_sum = float(\"inf\")\ni = 0\nj = k-1\nindex = 1\nmin_sum = cur_sum = sum(h[i:j+1])\ni += 1\nj += 1\n\nwhile j < len(h):\n cur_sum = cur_sum + h[j] - h[i-1]\n if min_sum > cur_sum:\n index = i\n min_sum = cur_sum\n i += 1\n j += 1\n\nprint(index+1)\n" }, { "alpha_fraction": 0.42881646752357483, "alphanum_fraction": 0.4493996500968933, "avg_line_length": 26.809524536132812, "blob_id": "dbd30f3286c377e1c4ec8909b415d1aeaacf78f9", "content_id": "8e21320775e7b0e071e6c8ee56b491b0cbe6fa32", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 583, "license_type": "no_license", "max_line_length": 66, "num_lines": 21, "path": "/面试与笔试/笔试/hw/33.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nT = int(input())\nfor i in range(T):\n res = 0\n max_score = 0\n n = input()\n nums = list(map(int, sys.stdin.readline().strip().split(\" \")))\n d = {nums[0]: 1}\n for j in range(1, len(nums)):\n tmp = sorted(d.items(), key=lambda x:x[0])\n for item in tmp:\n if item[0] < nums[j]:\n res += item[1]\n elif item[0] > nums[j]:\n res -= 1\n max_score = max(max_score, res)\n if nums[j] in d.keys():\n d[nums[j]] += 1\n else:\n d[nums[j]] = 1\n print(max_score, res)" }, { 
"alpha_fraction": 0.4446742534637451, "alphanum_fraction": 0.47673216462135315, "avg_line_length": 28.272727966308594, "blob_id": "89939b7a129ab353f10ff01229629b1da9bd94cc", "content_id": "4f80c73c26087381ecdd38acc8941fe86d223451", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 967, "license_type": "no_license", "max_line_length": 42, "num_lines": 33, "path": "/排序/折半插入排序.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# time: O(n^2)\nclass Solution:\n def binary_insert_sort(self, nums):\n for i in range(1, len(nums)):\n low = 0\n high = i-1\n while low <= high:\n mid = (low + high) >> 1\n if nums[mid] == nums[i]:\n high = mid\n break\n elif nums[mid] < nums[i]:\n low = mid + 1\n else:\n high = mid - 1\n tmp = nums[i]\n for j in range(i-1, high, -1):\n nums[j+1] = nums[j]\n nums[high + 1] = tmp\n return nums\n\nnums = []\nprint(Solution().binary_insert_sort(nums))\nnums = [1]\nprint(Solution().binary_insert_sort(nums))\nnums = [1,2,3]\nprint(Solution().binary_insert_sort(nums))\nnums = [4,3,2,1]\nprint(Solution().binary_insert_sort(nums))\nnums = [3,1,2,4,3,0]\nprint(Solution().binary_insert_sort(nums))\nnums = [1,4,5,6,3,4]\nprint(Solution().binary_insert_sort(nums))\n\n" }, { "alpha_fraction": 0.4826536774635315, "alphanum_fraction": 0.49847838282585144, "avg_line_length": 27.824562072753906, "blob_id": "1ca06470981a6dc2089f89f742237ff70e0dfd87", "content_id": "95c5b0000f85de39e5bf8d906aa63d5ac7ceae74", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2269, "license_type": "no_license", "max_line_length": 53, "num_lines": 57, "path": "/offer/4.二维数组中的查找.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n题目描述\n在一个二维数组中(每个一维数组的长度相同),每一行都按照从左到右递增的顺序排序,每一列都按照从上\n到下递增的顺序排序。请完成一个函数,输入这样的一个二维数组和一个整数,判断数组中是否含有该整数。\n\"\"\"\n\n\n# -*- coding:utf-8 -*-\nclass Solution:\n # array 二维列表\n def Find1(self, target, array):\n # 法一:暴力搜索 O(m*n)\n for i in range(len(array)):\n for j in range(len(array[0])):\n if target == array[i][j]:\n return True\n return False\n def Find2(self, target, array):\n # 法二:对每行进行二分搜索,O(nlogm)\n for i in range(len(array)):\n l, r = 0, len(array[i])-1\n while l <= r:\n mid = (l+r)//2\n if array[i][mid] == target:\n return True\n elif array[i][mid] < target:\n l = mid + 1\n else: \n r = mid - 1\n return False\n\n def Find(self, target, array):\n # 法三:矩阵是有序的,可以从左下角A(或右上角)出发,若target小于A,\n # 则必定在上方的行,若大于则必定在右侧的列\n if not array or len(array) == 0:\n return False\n i = len(array)-1\n j = 0\n while i >= 0 and j < len(array[0]):\n if target == array[i][j]:\n return True\n elif target > array[i][j]:\n j += 1\n else:\n i -= 1\n return False\n \n # 思路四:把整个矩阵分成四个块A,B,C,D,左上角的块A中的元素始终小于右下角块D中\n # 的元素,以A中最右下角的元素a作为基准,比较a与target的大小,若a > target,则\n # 在上方或者左方找(A,B,C中,A有重叠),若小于则在下方或右方找(B,C,D中,D有重\n # 叠),但是这样的查找给我们带来了不便,因为每次比较完之后,下次要查找的区域有重叠,\n # 而且没有规律。\n\nprint(Solution().Find(1,None))\nprint(Solution().Find(1,[]))\nprint(Solution().Find(1,[[]]))\nprint(Solution().Find(1,[[0,2,3],[2,4,6]]))\n" }, { "alpha_fraction": 0.46352583169937134, "alphanum_fraction": 0.4893617033958435, "avg_line_length": 26.45833396911621, "blob_id": "aa08f2f06dbda51f2c139f8c3cf5284de0a04ca1", "content_id": "5cec13d06de7b9eb4b2a145ff60ad55708f776d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 658, "license_type": "no_license", "max_line_length": 68, "num_lines": 24, "path": 
"/面试与笔试/笔试/mt/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nclass Solution:\n def max_value(self, easy, hard):\n if len(easy) == 0:\n return 0\n elif len(easy) == 1:\n return easy[0]\n else:\n res = [0]*len(easy)\n res[0] = easy[0]\n res[1] = max(easy[0] + easy[1], hard[1])\n for i in range(2, len(easy)):\n res[i] = max(res[i-1] + easy[i], res[i-2] + hard[i])\n return res[-1]\n\nN = int(input().strip())\neasy = []\nhard = []\nfor i in range(N):\n line = list(map(float, sys.stdin.readline().strip().split(\" \")))\n easy.append(line[0])\n hard.append(line[1])\nprint(Solution().max_value(easy, hard))" }, { "alpha_fraction": 0.5535714030265808, "alphanum_fraction": 0.6035714149475098, "avg_line_length": 22.41666603088379, "blob_id": "5a156a0c7fe7ca45def4e632bfac95f7d1e140b1", "content_id": "4ecc494597fa5fdbc05dae1262638bce626f6c79", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 394, "license_type": "no_license", "max_line_length": 45, "num_lines": 12, "path": "/offer/15.2判断整数是否是2的次方.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class Solution:\n # 如果一个整数是2的整数次方,那么他的二进制表示中有且只有移位是1,其他为都为0\n # 将这个整数减去1,再和你自身进行&运算,则得到0\n def f(self, num):\n if not (num-1)&num:\n return True\n else:\n return False\n\nprint(Solution().f(16))\nprint(Solution().f(15))\nprint(Solution().f(1024))" }, { "alpha_fraction": 0.3641536235809326, "alphanum_fraction": 0.4295874834060669, "avg_line_length": 25.074073791503906, "blob_id": "2a2ce784fc1cafc5ba67c669f3e94a4a3260da95", "content_id": "1274dca47117adca6ab83b211e8b37fee43f101a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 703, "license_type": "no_license", "max_line_length": 55, "num_lines": 27, "path": "/排序/冒泡排序.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# time: O(n^2)\nclass Solution:\n def bubble_sort1(self, num):\n for i in range(len(num)-1):\n for j in range(len(num) - 1 - i):\n if num[j] > num[j+1]:\n num[j], num[j+1] = num[j+1], num[j]\n return num\n\n def bubble_sort(self, num):\n i = len(num)-1\n while i > 0:\n last_change = 0\n for j in range(i):\n if num[j] > num[j+1]:\n num[j], num[j+1] = num[j+1], num[j]\n last_change = j\n i = last_change\n return num\n\n\n\n\n# num = [49, 38, 65, 97, 76, 13, 27, 49]\n# num = [1,2,3,1,3,4,2,4,5,6,3]\nnum = [1,2,3,4,5,6]\nprint(Solution().bubble_sort(num))" }, { "alpha_fraction": 0.6836734414100647, "alphanum_fraction": 0.6836734414100647, "avg_line_length": 13.142857551574707, "blob_id": "ba3864957c829d6d363d6a653d0258fc5b735925", "content_id": "bdb621b1be6e6e24a09e4bc1abe8cdf8f6e31998", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 98, "license_type": "no_license", "max_line_length": 31, "num_lines": 7, "path": "/面试与笔试/笔试/pdd/5.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nfor line in sys.stdin:\n print(line)\n\n# lines = sys.stdin.readlines()\n# print(lines)" }, { "alpha_fraction": 0.42351046204566956, "alphanum_fraction": 0.4243156313896179, "avg_line_length": 27.159090042114258, "blob_id": "7772c7a8758488f5f424844a3d44a0d43b34fc5a", "content_id": "6b4346946902abc175df3595f4511bba51b64544", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1394, "license_type": "no_license", "max_line_length": 53, "num_lines": 44, "path": 
"/offer/32.2按之字形顺序打印二叉树.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n请实现一个函数按照之字形打印二叉树,即第一行按照从左到右的顺序打印,\n第二层按照从右至左的顺序打印,第三行按照从左到右的顺序打印,其他行以此类推。\n\"\"\"\n# class TreeNode:\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n\n\nclass Solution:\n def Print(self, pRoot):\n # write code here\n if not pRoot:\n return []\n odd_stack = [pRoot]\n even_stack = []\n res = []\n row = []\n while odd_stack or even_stack:\n if odd_stack:\n while odd_stack:\n node = odd_stack.pop()\n row.append(node.val)\n if node.left:\n even_stack.append(node.left)\n if node.right:\n even_stack.append(node.right)\n res.append(row)\n row = []\n else:\n while even_stack:\n node = even_stack.pop()\n row.append(node.val)\n if node.right:\n odd_stack.append(node.right)\n if node.left:\n odd_stack.append(node.left)\n res.append(row)\n row = []\n return res\n\n\n\n" }, { "alpha_fraction": 0.5586283206939697, "alphanum_fraction": 0.5652654767036438, "avg_line_length": 24.742856979370117, "blob_id": "610072eb90f26e6d0f24c1e6e45dd31b2c022f93", "content_id": "a5806c8cc666695bcf3fa147755b97140d169125", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 904, "license_type": "no_license", "max_line_length": 52, "num_lines": 35, "path": "/面试与笔试/笔试/nvidia/4.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class Solution:\n def __init__(self):\n self.can = {}\n self.count = 0\n def AddCandidate(self, pCandidateName):\n\n if not pCandidateName.isalpha():\n return 0\n if pCandidateName in self.can.keys():\n return 0\n self.can[pCandidateName] = 0\n self.count += 1\n def Vote(self, pCandidateName):\n if pCandidateName in self.can.keys():\n self.can[pCandidateName] += 1\n def GetVoteResult(self,pCandidateName):\n if not pCandidateName:\n return None\n return self.can[pCandidateName]\n def clear(self):\n pass\n\n\ns = Solution()\na = int(input())\nb = input().split(' ')\nc = int(input())\nd = input().split(' ')\nfor i in range(a):\n s.AddCandidate(b[i])\nfor j in range(c):\n s.Vote(d[j])\nfor i in range(a):\n print(b[i] + \" : \" + str(s.GetVoteResult(b[i])))\nprint(\"Invalid : \" + str(s.count))\n\n\n\n" }, { "alpha_fraction": 0.43944352865219116, "alphanum_fraction": 0.4484451711177826, "avg_line_length": 28.119047164916992, "blob_id": "3b8fcaa91ece7ad4db78cfdc513377c446e9ba1e", "content_id": "c94d90368b2abc21a890d776ebd4293fb31a5d0f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1222, "license_type": "no_license", "max_line_length": 78, "num_lines": 42, "path": "/面试与笔试/笔试/iqiyi/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(input().strip())\na = list(map(int, sys.stdin.readline().strip().split(' ')))\n\n\nclass Solution:\n def Permutation(self, ss):\n # write code here\n res = []\n if ss == '':\n return res\n\n ss = list(ss)\n self._Permutation(ss, 0, res)\n\n return len(res)\n\n def _Permutation(self, ss, begin, res):\n if begin == len(ss) - 1:\n tmp = ''.join(ss)\n flag = True\n for i in range(len(a)):\n if a[i] == 1 and tmp[i] <= tmp[i + 1]:\n flag = False\n break\n elif a[i] == 0 and tmp[i] >= tmp[i + 1]:\n flag = False\n break\n if flag:\n res.append(''.join(ss))\n return\n\n for i in range(begin, len(ss)):\n if ss[begin] == ss[i] and begin != i:\n continue\n ss[begin], ss[i] = ss[i], ss[begin]\n self._Permutation(ss, begin + 1, res)\n ss[begin], 
ss[i] = ss[i], ss[begin]\n\n# res, count = Solution().Permutation(''.join([str(x) for x in range(1,n+1)]))\ncount = Solution().Permutation(''.join([str(x) for x in range(1,n+1)]))\nprint(count)" }, { "alpha_fraction": 0.5030120611190796, "alphanum_fraction": 0.5240963697433472, "avg_line_length": 34, "blob_id": "fadf588f431c71ba7189d8d422974bf5d5d413c2", "content_id": "78324de6a5c7f99282e4a2a2f6a2ed55a9625f43", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 664, "license_type": "no_license", "max_line_length": 62, "num_lines": 19, "path": "/面试与笔试/笔试/hw/33333.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "a = \"1 3 2 4 3 2\"\ner = [int(i) for i in a.split()]\nscore = [0]\nleft = []\nmid = [er[0]]\nright = []\nscore.append(score[-1]+len(left)-len(right))\nfor item in range(1, len(er)):\n if er[item] > er[item-1]:\n left += mid\n mid = [i for i in right if i == er[item]] + [er[item]]\n left += [i for i in right if i < er[item]]\n right = [i for i in right if i > er[item]]\n if er[item] < er[item-1]:\n right += mid + [i for i in left if i > er[item]]\n mid = [i for i in left if i == er[item]] + [er[item]]\n left = [i for i in left if i < er[item]]\n score.append(score[-1]+len(left)-len(right))\nprint(max(score), score[-1])" }, { "alpha_fraction": 0.5069252252578735, "alphanum_fraction": 0.5193905830383301, "avg_line_length": 17.538461685180664, "blob_id": "34f34c9b71504228150ad0cda47df716eacca2b7", "content_id": "1a57131f664842660be00661901cabc787f1f45d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 722, "license_type": "no_license", "max_line_length": 52, "num_lines": 39, "path": "/面试与笔试/笔试/pingan/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nimport json\n\ngraph = json.loads(sys.stdin.readline().strip())\nnode = set()\n\nfor k, v in graph.items():\n node.add(k)\n node.update(v)\n\nkd = {k: i for i, k in enumerate(node)}\n\nmatrix = [[0] * len(node) for i in range(len(node))]\n\nfor k, v in graph.items():\n for vv in v:\n matrix[kd[k]][kd[vv]] = 1\nn = len(node)\nvisited = [0] * n\nans = False\n\n\ndef dfs(nums, i, flag):\n global n\n global ans\n for j in range(n):\n if nums[i][j] == 1 and flag[j] == 1:\n ans = True\n return\n elif nums[i][j] == 1 and flag[j] == 0:\n flag[j] = 1\n dfs(nums, j, flag)\n flag[j] = 0\n\n\nfor i in range(n):\n dfs(matrix, i, visited)\n\nprint(str(ans))" }, { "alpha_fraction": 0.34612491726875305, "alphanum_fraction": 0.3702031672000885, "avg_line_length": 21.133333206176758, "blob_id": "c165fa8956ae1a3898ce8ef39ba1dddc2000127b", "content_id": "f98f364b4de0982bd86ee5131c9638f6daad4e73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1421, "license_type": "no_license", "max_line_length": 51, "num_lines": 60, "path": "/offer/52.两个链表的第一个公共节点.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n# class ListNode:\n# def __init__(self, x):\n# self.val = x\n# self.next = None\n\n'''\n题目描述\n输入两个链表,找出它们的第一个公共结点。\n'''\nclass Solution:\n # o(m + n)\n def FindFirstCommonNode1(self, pHead1, pHead2):\n # write code here\n p = pHead1\n q = pHead2\n\n n1, n2 = 0, 0\n while p:\n p = p.next\n n1 += 1\n while q:\n q = q.next\n n2 += 1\n if n1 > n2:\n diff = n1 - n2\n p, q = pHead1, pHead2\n while diff:\n p = p.next\n diff -= 1\n else:\n diff = n2 - n1\n p, q = pHead1, pHead2\n while diff:\n q = 
q.next\n diff -= 1\n while p and q:\n if p == q:\n return p\n p = p.next\n q = q.next\n\n # o(m + n)\n def FindFirstCommonNode(self, pHead1, pHead2):\n p, q = pHead1, pHead2\n while p and q:\n if p == q:\n return p\n if not p.next:\n p = pHead2\n q = q.next\n continue\n if not q.next:\n q = pHead1\n p = p.next\n continue\n p = p.next\n q = q.next\n\n # 还可以使用两个栈,保存两个链表,然后弹出比较\n\n" }, { "alpha_fraction": 0.4981684982776642, "alphanum_fraction": 0.5128205418586731, "avg_line_length": 15.058823585510254, "blob_id": "a5966a7f28ee96a1043c2754347d24dd094ccc88", "content_id": "6c09a01d125f372ecfe4ef7276cf36683ce7cf3b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 297, "license_type": "no_license", "max_line_length": 29, "num_lines": 17, "path": "/面试与笔试/判断是否是回文.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n判断一个字符串是否是回文\n\"\"\"\ndef isPalindrome(s):\n i = 0\n j = len(s) - 1\n while i < j:\n if s[i] != s[j]:\n return False\n i += 1\n j -= 1\n\n return True\n\nprint(isPalindrome(\"assss\"))\nprint(isPalindrome(\"ssss\"))\nprint(isPalindrome(\"assssa\"))\n" }, { "alpha_fraction": 0.5431034564971924, "alphanum_fraction": 0.5678879022598267, "avg_line_length": 28.967741012573242, "blob_id": "3d9ce52073eb082518a51ae0cae1300efe93d576", "content_id": "ed4ecf6bade426298bdd88da3a5465c25c9228b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1036, "license_type": "no_license", "max_line_length": 112, "num_lines": 31, "path": "/offer/26.树的子结构.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n输入两棵二叉树A,B,判断B是不是A的子结构。(ps:我们约定空树不是任意一个树的子结构)\n\"\"\"\nclass TreeNode:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\nclass Solution:\n def HasSubtree(self, pRoot1, pRoot2):\n # 递归,多处用到树的遍历\n res = False\n if pRoot1 and pRoot2:\n if pRoot1.val == pRoot2.val:\n res = self.DoseHasSubtree(pRoot1, pRoot2)\n if not res:\n res = self.HasSubtree(pRoot1.left, pRoot2)\n if not res:\n res = self.HasSubtree(pRoot1.right, pRoot2)\n return res\n\n def DoseHasSubtree(self, pRoot1, pRoot2):\n if not pRoot2:\n return True\n if not pRoot1:\n return False\n if pRoot1.val != pRoot2.val:\n return False\n return self.DoseHasSubtree(pRoot1.left, pRoot2.left) and self.DoseHasSubtree(pRoot1.right, pRoot2.right)" }, { "alpha_fraction": 0.44148585200309753, "alphanum_fraction": 0.48389217257499695, "avg_line_length": 77.02564239501953, "blob_id": "bb31a52a585e0a777339c48efeeee2d41a42fc4f", "content_id": "c9c912ce2439385240109681f7db552897e560a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3042, "license_type": "no_license", "max_line_length": 276, "num_lines": 39, "path": "/面试与笔试/笔试/tx/5.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = input()\nmatrix = []\nfor i in range(int(n)):\n matrix.append(list(map(int, sys.stdin.readline().strip().split(' '))))\n\nmin_matrix = [[0]*len(matrix[0]) for i in range(len(matrix))]\nmax_matrix = [[0]*len(matrix[0]) for i in range(len(matrix))]\n\n# print(len(matrix), )\nfor i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if i == 0:\n min_matrix[i][j] = matrix[i][j]\n max_matrix[i][j] = matrix[i][j]\n if j == 0:\n if matrix[i][j] == 0:\n max_matrix[i][j] = max(max_matrix[i - 1][j] * -1, max_matrix[i - 1][j + 1] * -1, min_matrix[i-1][j] * -1, min_matrix[i 
- 1][j + 1] * -1)\n min_matrix[i][j] = min(max_matrix[i - 1][j] * -1, max_matrix[i - 1][j + 1] * -1, min_matrix[i-1][j] * -1, min_matrix[i - 1][j + 1] * -1)\n else:\n max_matrix[i][j] = max(max_matrix[i-1][j] + matrix[i][j], max_matrix[i-1][j+1] + matrix[i][j], min_matrix[i-1][j] + matrix[i][j], min_matrix[i-1][j+1] + matrix[i][j])\n min_matrix[i][j] = min(max_matrix[i-1][j] + matrix[i][j], max_matrix[i-1][j+1] + matrix[i][j], min_matrix[i-1][j] + matrix[i][j], min_matrix[i-1][j+1] + matrix[i][j])\n elif j == len(matrix[0]) - 1:\n if matrix[i][j] == 0:\n max_matrix[i][j] = max(max_matrix[i - 1][j] * -1, max_matrix[i - 1][j - 1] * -1, min_matrix[i-1][j] * -1, min_matrix[i - 1][j - 1] * -1)\n min_matrix[i][j] = min(max_matrix[i - 1][j] * -1, max_matrix[i - 1][j - 1] * -1, min_matrix[i-1][j] * -1, min_matrix[i - 1][j - 1] * -1)\n else:\n max_matrix[i][j] = max(max_matrix[i-1][j] + matrix[i][j], max_matrix[i-1][j-1] + matrix[i][j], min_matrix[i-1][j] + matrix[i][j], min_matrix[i-1][j-1] + matrix[i][j])\n min_matrix[i][j] = min(max_matrix[i-1][j] + matrix[i][j], max_matrix[i-1][j-1] + matrix[i][j], min_matrix[i-1][j] + matrix[i][j], min_matrix[i-1][j-1] + matrix[i][j])\n\n else:\n if matrix[i][j] == 0:\n max_matrix[i][j] = max(max_matrix[i - 1][j] * -1, max_matrix[i - 1][j - 1] * -1, max_matrix[i - 1][j + 1] * -1, min_matrix[i - 1][j] * -1, min_matrix[i - 1][j - 1] * -1, min_matrix[i - 1][j + 1] * -1)\n min_matrix[i][j] = min(max_matrix[i - 1][j] * -1, max_matrix[i - 1][j - 1] * -1, max_matrix[i - 1][j + 1] * -1, min_matrix[i - 1][j] * -1, min_matrix[i - 1][j - 1] * -1, min_matrix[i - 1][j + 1] * -1)\n else:\n max_matrix[i][j] = max(max_matrix[i - 1][j] + matrix[i][j], max_matrix[i - 1][j - 1] + matrix[i][j], max_matrix[i - 1][j + 1] + matrix[i][j], min_matrix[i - 1][j] + matrix[i][j], min_matrix[i - 1][j - 1] + matrix[i][j], min_matrix[i - 1][j + 1] + matrix[i][j])\n min_matrix[i][j] = min(max_matrix[i - 1][j] + matrix[i][j], max_matrix[i - 1][j - 1] + matrix[i][j], max_matrix[i - 1][j + 1] + matrix[i][j], min_matrix[i - 1][j] + matrix[i][j], min_matrix[i - 1][j - 1] + matrix[i][j], min_matrix[i - 1][j + 1] + matrix[i][j])\n\nprint(max(max_matrix[-1]))" }, { "alpha_fraction": 0.508369505405426, "alphanum_fraction": 0.5300682187080383, "avg_line_length": 30.019229888916016, "blob_id": "211ffc7e89893f76bd42f12ac4fd2732e1bc4b53", "content_id": "348aa1f959ea299c6674cd381ecc0d6847b1f35f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2193, "license_type": "no_license", "max_line_length": 51, "num_lines": 52, "path": "/offer/3.数组中重复的数字.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n## 注意不一定存在重复\n在一个长度为n的数组里的所有数字都在0到n-1的范围内。 数组中某些数字是重复的,但不知道有几个数字是\n重复的。也不知道每个数字重复几次。请找出数组中任意一个重复的数字。 例如,如果输入长度为7的数组\n{2,3,1,0,2,5,3},那么对应的输出是第一个重复的数字2。\n\"\"\"\nimport collections\nclass Solution:\n # 这里要特别注意~找到任意重复的一个值并赋值到duplication[0]\n # 函数返回True/False\n def duplicate1(self, numbers, duplication):\n # 法一:使用内置函数Counter\n c=collections.Counter(numbers)\n for k, v in c.items():\n if v > 1:\n duplication[0] = k\n return True\n return False\n def duplicate2(self, numbers, duplication):\n # 法二:不使用内置函数,修改数组 时间O(n),空间O(1)\n i = 0\n while i < len(numbers):\n if numbers[i] == i:\n i += 1\n continue\n tmp = numbers[numbers[i]]\n if numbers[i] == tmp:\n duplication[0] = tmp\n return True\n else:\n numbers[numbers[i]] = numbers[i]\n numbers[i] = tmp\n return False\n def duplicate(self, numbers, duplication):\n # 
法三:修改数组,利用现有数组设置标志,当一个数字被访问过后,可以设置\n # 对应位上的数 + n,之后再遇到相同的数时,会发现对应位上的数已经大于\n # 等于n了,那么直接返回这个数即可\n n = len(numbers)\n for i in range(n):\n index = numbers[i]\n if index >= n:\n index -= n\n if numbers[index] >= n:\n duplication[0] = index\n return True\n numbers[index] += n\n return False\n # 其他思路:\n # 1. 先排序,时间O(nlogn)\n # 2. 利用额外的哈希表存储,将元素放到哈希表对应的下标,时间O(n),空间O(n)\nprint(Solution().duplicate([2,1,3,1,4],[]))\nprint(Solution().duplicate([2,3,1,0,2,5,3],[]))\n" }, { "alpha_fraction": 0.5194805264472961, "alphanum_fraction": 0.5584415793418884, "avg_line_length": 21.647058486938477, "blob_id": "5d532975fd956886cbd63bcecc02e2fc03ba5925", "content_id": "79f2f66dd894825cade0e220d6897b717ecf26a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 385, "license_type": "no_license", "max_line_length": 72, "num_lines": 17, "path": "/面试与笔试/笔试/hulu/2.2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(input().strip())\narr = list(map(int, sys.stdin.readline().strip().split(' ')))\n\nres = 0\ntmp = sorted(arr, reverse=True)\nend = n\nfor i in range(n):\n if end > 0:\n index = \"\".join([str(x) for x in arr[0:end]]).rfind(str(tmp[i]))\n res += tmp[i] * sum(range(index+1,end+1))\n end = index\n else:\n break\n print(i)\n\nprint(res%1000000007)\n" }, { "alpha_fraction": 0.5311315655708313, "alphanum_fraction": 0.5793178081512451, "avg_line_length": 32.563636779785156, "blob_id": "679d9d64eb395a79e4edd4f3644e71f4ed1df9c0", "content_id": "407a88592fe4f67d8932d0bb6cf00390dc827206", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2585, "license_type": "no_license", "max_line_length": 74, "num_lines": 55, "path": "/offer/14.剪绳子.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n题目:给你一根长度为n的绳子,请把绳子剪成m段(m、n都是整数,n>1并且m>1),每段绳子的长度记为k[0],k[1]...k[m]。请问每段绳子\n的最大乘积是多少,例如当绳子长度为8时,把它剪成三段2/3/3,此时得到最大乘积18\n\"\"\"\n\nclass Solution:\n def maxProductAfterCutting1(self, length):\n # 动态规划\n # 长度为n的绳子,在第一刀的时候有n-1种选择,设在i处剪,则f(n)=max(f(i)*f(n-i)) 0<i<n\n # 这是一个从上至下的递归,由于递归会有很多重复的子问题,所以按照自下而上的方法,先算f(2), f(3), f(4)等\n # 当n=2时,只能剪成两段(m>1), f(2)=1; 当n=3时,f(3)=2,\n if length <= 0:\n return -1\n if 0 < length < 4:\n return length-1\n\n products = [0] * (length+1)\n products[1] = 1\n products[2] = 2\n products[3] = 3\n for i in range(4, length+1):\n maxProduct = 0\n for j in range(1, i//2 + 1):\n maxProduct = max(products[j] * products[i-j], maxProduct)\n products[i] = maxProduct\n return products[length]\n\n def maxProductAfterCutting(self, length):\n # 贪婪算法\n # 当 n>= 5时,尽可能多的剪长度为3的绳子,当剩下的绳子长度为4时,把绳子剪成两段长度为2的绳子\n # 证明(反证法):如果你剪绳子,剪了 N 段,假设这时候,这N个数相乘为最大,且存在一个数a\n # 大于等于5,那么 这个数 a 分解成为 3 x (a-3) 比原来的结果更大,所以前面相乘结果不是最大的\n # 所以使得N个数相乘为最大,则不存在大于等于5的一段\n # 书上:2(n-2)>n 3(n-3)>n & 3(n-3) >= 2(n-2) 所以尽可能分成3的段\n if length <= 0:\n return -1\n if 0 < length < 4:\n return length-1\n # 尽可能多减去长度为3的绳子段\n timesOf3 = length//3\n # 当最后剩下的长度为4时,不能再减去长度为3的绳子段\n # 此时更好的方法是剪成2个两段的\n if length - timesOf3 * 3 == 1:\n timesOf3 -= 1\n\n timeOf2 = (length - timesOf3 * 3) // 2\n return pow(3, timesOf3) * pow(2, timeOf2)\n\n\nprint(Solution().maxProductAfterCutting(8))\nprint(Solution().maxProductAfterCutting(0))\nprint(Solution().maxProductAfterCutting(1))\nprint(Solution().maxProductAfterCutting(2))\nprint(Solution().maxProductAfterCutting(3))\nprint(Solution().maxProductAfterCutting(4))\n\n" }, { "alpha_fraction": 0.5993404984474182, "alphanum_fraction": 0.634789764881134, 
"avg_line_length": 30.128204345703125, "blob_id": "30dacbe1a2294d89c502507753ae6f5d494dae65", "content_id": "0356c8556453fc8886db91a0412d673ed02d3dfc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1381, "license_type": "no_license", "max_line_length": 114, "num_lines": 39, "path": "/offer/33.二叉搜索树的后序遍历序列.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n输入一个整数数组,判断该数组是不是某二叉搜索树的后序遍历的结果。如果是则输出Yes,否则输出No。\n假设输入的数组的任意两个数字都互不相同。\n\n类似题目:二叉搜索树的前序遍历结果\n\"\"\"\nclass Solution:\n def VerifySquenceOfBST(self, sequence):\n # write code here\n if not sequence:\n return False\n return self.VerifySquenceOfBSTRecursion(sequence)\n\n def VerifySquenceOfBSTRecursion(self, sequence):\n if not sequence:\n return True\n root = sequence[-1]\n i = 0\n while i < len(sequence) - 1:\n if sequence[i] < root:\n i += 1\n else:\n break\n for j in range(i, len(sequence) - 1):\n if sequence[j] < root:\n return False\n\n return self.VerifySquenceOfBSTRecursion(sequence[:i]) and self.VerifySquenceOfBSTRecursion(sequence[i:-1])\n\nprint(Solution().VerifySquenceOfBST([5,7,6,9,11,10,8]))\nprint(Solution().VerifySquenceOfBST([5,7,6]))\nprint(Solution().VerifySquenceOfBST([7,4,6,5]))\nprint(Solution().VerifySquenceOfBST([4,8,6,12,16,14,10]))\nprint(Solution().VerifySquenceOfBST([]))\nprint(Solution().VerifySquenceOfBST([1]))\nprint(Solution().VerifySquenceOfBST([1,2,3,4]))\nprint(Solution().VerifySquenceOfBST([4,3,2,1]))" }, { "alpha_fraction": 0.49531251192092896, "alphanum_fraction": 0.520312488079071, "avg_line_length": 20.299999237060547, "blob_id": "b28e3e0dcb73b09b3436e7ed1e3c2195a4784c78", "content_id": "0dc0bac4736437e8f528e0f39c909cc7033805d2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 640, "license_type": "no_license", "max_line_length": 66, "num_lines": 30, "path": "/面试与笔试/笔试/jd/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nmatrix = []\nfor i in range(5):\n line = list(map(int, sys.stdin.readline().strip().split(' ')))\n matrix.append(line)\n\ncount = 0\ntmp = {}\nfor i in range(5):\n for j in range(5):\n if matrix[i][j] in tmp.keys():\n tmp[matrix[i][j]].append((i,j))\n else:\n tmp[matrix[i][j]] = [(i,j)]\n\ncount = 0\nfor key in tmp.keys():\n value = tmp[key]\n count1 = 0\n if len(value) < 3:\n count1 += len(value)\n count = 2\n val1 = sorted(value, key=lambda x:x[0])\n val2 = sorted(value, key=lambda x:x[1])\n max_len = 0\n # for val in value:\n # if\n # count = 2\n\nprint(count)\n\n" }, { "alpha_fraction": 0.4929971992969513, "alphanum_fraction": 0.5322129130363464, "avg_line_length": 21.3125, "blob_id": "95b0604fa87a4bdc3a7e16252f68bd68accd2853", "content_id": "5beef2b6146aa8f6f618c7d6c0da2305f419c49d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 357, "license_type": "no_license", "max_line_length": 66, "num_lines": 16, "path": "/面试与笔试/笔试/360-2/1.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\n\nn = int(input())\nnums = []\nfor i in range(n):\n line = list(map(int, sys.stdin.readline().strip().split(\" \")))\n a = line[0]\n t = line[1]\n nums.append([a,t])\nnums.sort(key=lambda x:x[0], reverse=True)\nv = 0\nl = 0\nfor item in nums:\n l += (v * item[1] + 0.5 * item[0] * item[1] * item[1])\n v = v + item[0]*item[1]\nprint(\"%.1f\" %l)\n" }, { "alpha_fraction": 0.5067155361175537, 
"alphanum_fraction": 0.5189255475997925, "avg_line_length": 18.5, "blob_id": "b5f28dfdeacf58bdd902372ef145474bb335a38c", "content_id": "71eecd21b1a668cdd36143a01f9df695c4166793", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 883, "license_type": "no_license", "max_line_length": 53, "num_lines": 42, "path": "/offer/59.2栈的最大值.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n定义一个栈并实现函数max得到栈的最大值,要求函数max,push,pop的时间复杂度都是O(1)\n\"\"\"\n\nclass Stack:\n def __init__(self):\n self.stack = []\n self.max_stack = []\n def max(self):\n if not self.max_stack:\n return None\n else:\n return self.max_stack[-1]\n\n def push(self, x):\n self.stack.append(x)\n if self.max_stack and self.max_stack[-1] > x:\n return\n else:\n self.max_stack.append(x)\n\n def pop(self):\n if not self.stack:\n return None\n res = self.stack.pop()\n if res == self.max_stack[-1]:\n self.max_stack.pop()\n return res\n\ns = Stack()\nprint(s.pop())\ns.push(3)\nprint(s.max())\nprint(s.pop())\ns.push(5)\ns.push(2)\ns.push(4)\ns.push(1)\nprint(s.max())\nprint(s.pop())\nprint(s.max())\n" }, { "alpha_fraction": 0.44947734475135803, "alphanum_fraction": 0.4799651503562927, "avg_line_length": 25.113636016845703, "blob_id": "c6a7741c6522a601994bfc830a72a14be7c2e76b", "content_id": "a4e98df8d42f1151bf2d64fb7bdc055c8bd1cd2e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1290, "license_type": "no_license", "max_line_length": 68, "num_lines": 44, "path": "/offer/53.1 0~n-1中缺失的数字.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "'''\n一个长度为n-1的递增排序数组中的所有数字都是唯一的,并且都在0~n-1的范围内,这个范围内的n个数只有一个不出现在改数组中,找出这个数\n'''\n\nclass Solution:\n\n # 不够严谨,还有些特殊情况\n def find_miss_value1(self, data):\n if not data:\n return None\n left = 0\n right = len(data) - 1\n while left <= right:\n mid = (left + right) // 2\n if data[mid] == mid:\n left = mid + 1\n elif data[mid] > mid:\n right = mid - 1\n return left\n\n def find_miss_value(self, data):\n if not data:\n return -1\n left = 0\n right = len(data) - 1\n while left <= right:\n mid = (left + right) // 2\n if data[mid] != mid:\n if mid ==0 or data[mid - 1] == mid -1:\n return mid\n right = mid - 1\n else:\n left = mid + 1\n\n if left == len(data):\n return left\n\n return -1\n\nprint(Solution().find_miss_value([0,1,2,4,5]))\nprint(Solution().find_miss_value([1,2,3,4,5]))\nprint(Solution().find_miss_value([0,1,2,3,4]))\nprint(Solution().find_miss_value([0]))\nprint(Solution().find_miss_value([]))" }, { "alpha_fraction": 0.5067934989929199, "alphanum_fraction": 0.542119562625885, "avg_line_length": 25.285715103149414, "blob_id": "186e7b02787c3ed853de362448ed87b84fcea92a", "content_id": "a5028134eb3b206df8e329a997f46328a3dc9c15", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 786, "license_type": "no_license", "max_line_length": 55, "num_lines": 28, "path": "/排序/简单选择排序.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# time: O(n^2)\n# 最好移动次数:0\n# 最差移动次数:n-1\n# 无论好坏,比较次数均为 n(n-1)/2\nclass Solution:\n def select_sort(self, nums):\n for i in range(len(nums)):\n min_num = nums[i]\n index = i\n for j in range(i+1, len(nums)):\n if nums[j] < min_num:\n min_num = nums[j]\n index = j\n nums[i], nums[index] = nums[index], nums[i]\n return nums\n\nnums = []\nprint(Solution().select_sort(nums))\nnums = [1]\nprint(Solution().select_sort(nums))\nnums = 
[1,2,3]\nprint(Solution().select_sort(nums))\nnums = [4,3,2,1]\nprint(Solution().select_sort(nums))\nnums = [3,1,2,4,3,0]\nprint(Solution().select_sort(nums))\nnums = [1,4,5,6,3,4]\nprint(Solution().select_sort(nums))\n" }, { "alpha_fraction": 0.44404974579811096, "alphanum_fraction": 0.46536412835121155, "avg_line_length": 32.14706039428711, "blob_id": "41f94c929f1a6fc277d5eb3816a199dd1ad7dd9c", "content_id": "a9e5f16275090099e2a37e89d4291051a67f8d87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1126, "license_type": "no_license", "max_line_length": 74, "num_lines": 34, "path": "/面试与笔试/笔试/mt/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nclass Solution:\n def print(self, matrix):\n if not matrix or len(matrix) == 0 or len(matrix[0]) == 0:\n return []\n res = []\n i = 0\n while i * 2 < len(matrix) and i * 2 < len(matrix[0]):\n res += self.circle(matrix, len(matrix), len(matrix[0]), i)\n i += 1\n return res\n\n def circle(self, matrix, r, c, s):\n res = []\n for i in range(s, c-s):\n res.append(matrix[s][i])\n if s < r - s - 1:\n for i in range(s + 1, r - s):\n res.append(matrix[i][c - s -1])\n if s < c-s - 1 and s < r - s - 1:\n for i in range(c-s-2, s-1, -1):\n res.append(matrix[r - s - 1][i])\n if s < c - s - 1 and s < r - s - 2:\n for i in range(r - s - 2, s, -1):\n res.append(matrix[i][s])\n return res\n\nline = list(map(int, sys.stdin.readline().strip().split(\" \")))\nM = line[0]\nN = line[1]\nmatrix = []\nfor i in range(M):\n matrix.append(list(map(int, sys.stdin.readline().strip().split(\" \"))))\nprint(' '.join([str(x) for x in Solution().print(matrix)]))" }, { "alpha_fraction": 0.6196236610412598, "alphanum_fraction": 0.6276881694793701, "avg_line_length": 34.380950927734375, "blob_id": "27d4c0feabe2cd79b9803ee7251d86fba69a87b2", "content_id": "0119c989501ac6c7b23bac23594c695bde1ccf37", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1116, "license_type": "no_license", "max_line_length": 99, "num_lines": 21, "path": "/offer/把数组排成最小的数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n首先将数组中的元素转化成字符串的形式,以便直接进行拼接。可以看到,输出结果就是对原来的数组元素进行排序之后,再连接输出。但是,这里不能用传统的单纯比较大小的规则,这里需要重新定义一个比较大小的规则:\n若ab > ba 则 a > b,\n若ab < ba 则 a < b,\n若ab = ba 则 a = b;\n因此,可以利用这个规则,对转换成字符串的数组进行排序,并连接起来。\n因此,我们可以在快排中修改排序规则,利用修改之后的快排对字符进行排序。\n\"\"\"\n# -*- coding:utf-8 -*-\nclass Solution:\n # 自定义排序规则加快排\n def PrintMinNumber(self, numbers):\n # write code here\n if numbers is None or len(numbers) == 0:\n return \"\"\n numbers = map(str, numbers)\n pivot = numbers[0]\n less = [i for i in numbers[1:] if (pivot+i)>(i+pivot)]\n great = [i for i in numbers[1:] if (pivot+i)<=(i+pivot)]\n result = \"\".join(self.PrintMinNumber(less))+pivot+\"\".join(self.PrintMinNumber(great))\n return result.lstrip(\"0\")\n\n" }, { "alpha_fraction": 0.5446118116378784, "alphanum_fraction": 0.5504055619239807, "avg_line_length": 22.97222137451172, "blob_id": "2b2e960bffef887ca805cc2bc06147c9c69d52ce", "content_id": "e344c2d625384d7d213352c87455dc0c1a620d6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1251, "license_type": "no_license", "max_line_length": 57, "num_lines": 36, "path": "/offer/23.链表中环的入口结点.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n给一个链表,若其中包含环,请找出该链表的环的入口结点,否则,输出null。\n\"\"\"\nclass ListNode:\n def 
__init__(self, x):\n self.val = x\n self.next = None\n\"\"\"\n1. 是否存在环 2. 环入口\n思路1:首先设置一个slow和一个fast指针,slow每次走一步,fast每次走两步,若相遇了,则说明一定存在环\n然后设置指针p从头出发,slow 从相遇点继续走,在p和slow会在入口相遇\n\n思路2:设置slow和fast判断是否存在环,然后统计出环中节点个数,然后再来两个指针p和q从头开始,p先走n步,\n然后q以相同的速度来追,则追上点为入口\n\n鲁棒性:输入头为空;不存在环\n\"\"\"\nclass Solution:\n def EntryNodeOfLoop(self, pHead):\n # write code here\n if not pHead:\n return None\n slow = fast = pHead\n while fast and fast.next:\n slow = slow.next\n fast = fast.next.next\n if slow == fast: # 相遇\n p = pHead\n while p != slow:\n p = p.next\n slow = slow.next\n return p\n else: # 不存在环\n return None\n" }, { "alpha_fraction": 0.38530465960502625, "alphanum_fraction": 0.43189963698387146, "avg_line_length": 36.266666412353516, "blob_id": "6c87d607fc088a1823c0de1c94a93a0ba0cbb7ed", "content_id": "bbcbdf7f678ade8fb470ca6757b7a070adabdb95", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 558, "license_type": "no_license", "max_line_length": 68, "num_lines": 15, "path": "/面试与笔试/笔试/pdd/4.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "N = int(input())\nL = list(map(int, input().strip().split(' ')))\nW = list(map(int, input().strip().split(' ')))\nL, W = zip(*sorted((zip(*[L, W])),key=lambda x:x[0]))\ndp = [[[0, 0, 0] for _ in range(2)] for _ in range(N)]\nfor i in range(N):\n for j in [0,1]:\n if 7*W[i] >= dp[i-1][j][0] and L[i] > dp[i-1][j][1]:\n dp[i][0] = dp[i-1][j]\n dp[i][1] = [dp[i-1][j][0] + W[i], L[i], dp[i-1][j][2]+1]\n else:\n dp[i][0] = dp[i-1][j]\n dp[i][1] = dp[i-1][j]\nans = max([dp[i][1][2] for i in range(N)])\nprint(ans)" }, { "alpha_fraction": 0.42424243688583374, "alphanum_fraction": 0.46736598014831543, "avg_line_length": 24.62686538696289, "blob_id": "bc5b1f4abeb9f5a3ce1270282d677f5d856e77c8", "content_id": "45aa91d8ed034761975f2cc2eb18a04502fb7177", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1946, "license_type": "no_license", "max_line_length": 60, "num_lines": 67, "path": "/offer/51.数组中的逆序对.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n'''\n在数组中的两个数字,如果前面一个数字大于后面的数字,则这两个数字组成一个逆序对。输入一个数组,求出这个数组中的逆序对的总\n数P。并将P对1000000007取模的结果输出。 即输出P%1000000007\n示例1\n输入\n1,2,3,4,5,6,7,0\n输出\n7\n'''\nclass Solution:\n # 两个数组从后往前合并\n def merge1(self, left, right):\n i, j = len(left) - 1, len(right) - 1\n res = []\n count = 0\n while i >= 0 and j >= 0:\n if left[i] <= right[j]:\n res = [right[j]] + res\n j -= 1\n else:\n res = [left[i]] + res\n count = count + (j + 1)\n i -= 1\n res = (left[:i+1] or right[:j+1]) + res\n return res, count\n\n # 两个数组从前往后合并\n def merge(self, left, right):\n i, j = 0, 0\n res = []\n count = 0\n while i < len(left) and j < len(right):\n if left[i] <= right[j]:\n res.append(left[i])\n i += 1\n else:\n res.append(right[j])\n count += (len(left) - i)\n j += 1\n res += left[i:] or right[j:]\n return res, count\n\n def sort(self, data):\n # write code here\n if not data or len(data) <= 1:\n return data, 0\n\n mid = len(data) // 2\n left, n1 = self.sort(data[:mid])\n right, n2 = self.sort(data[mid:])\n res, n3 = self.merge(left, right)\n return res, n1 + n2 + n3\n\n def InversePairs(self, data):\n if not data:\n return 0\n res, count = self.sort(data)\n return count\n\nif __name__ == '__main__':\n print(Solution().InversePairs([1,3,2,3,1]))\n print(Solution().InversePairs([1,3,2,1]))\n print(Solution().InversePairs([1,2,3,4,5,6,7,0]))\n\n\n# 用python总是会超时,有博客说用java不会" }, { 
"alpha_fraction": 0.47015833854675293, "alphanum_fraction": 0.47868454456329346, "avg_line_length": 23.909090042114258, "blob_id": "8a06e7dbb98969ce6eef393a4b794b32e53d23b4", "content_id": "bfd2cd1cd6578130adcacb3c2c674e88ef4ab708", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 917, "license_type": "no_license", "max_line_length": 82, "num_lines": 33, "path": "/offer/58.翻转单词顺序.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n'''\n输入一个英文句子,翻转句子中单词顺序,但单词内字符的顺序不变,标点符号和普通字母一样处理,例如:i am a student. => student. a am i\n'''\nclass Solution:\n def ReverseSentence(self, s):\n # write code here\n if not s:\n return \"\"\n s = self.reverse(s)\n segs = s.split(' ')\n res = []\n for seg in segs:\n res.append(self.reverse(seg))\n return ' '.join(res)\n\n def reverse(self, s):\n if not s:\n return \"\"\n s = list(s)\n start = 0\n end = len(s) - 1\n while start < end:\n s[start], s[end] = s[end], s[start]\n start += 1\n end -= 1\n return ''.join(s)\n\n def ReverseSentence1(self, s):\n s = s.split(' ')\n return ' '.join(s[::-1])\n\nprint(Solution().ReverseSentence('i am a student.'))" }, { "alpha_fraction": 0.47031962871551514, "alphanum_fraction": 0.4897260367870331, "avg_line_length": 23.97142791748047, "blob_id": "bd03f533ec3a988aef96c02ed7928a167e80d6e1", "content_id": "e61eff7a4aa1116615aefd31e5c976ef6069374a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 986, "license_type": "no_license", "max_line_length": 65, "num_lines": 35, "path": "/offer/53.数字在排序数组中出现的次数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# coding = utf-8\n\n'''\n题目描述\n统计一个数字在排序数组中出现的次数。\n'''\n\n\nclass Solution:\n def GetNumberOfK(self, data, k):\n left = 0\n right = len(data) - 1\n leftk = self.getleftK(data, k, left, right)\n rightk = self.getrightK(data, k, left, right)\n return rightk - leftk + 1\n\n def getleftK(self, data, k, left, right):###查找重复数字中最左边的那个数字位置\n while left <= right:\n mid = (left + right) // 2\n if data[mid] < k:\n left = mid + 1\n else:\n right = mid - 1\n return left\n\n def getrightK(self, data, k, left, right):###查找重复数字最右边的那个数字位置\n while left <= right:\n mid = (left + right) // 2\n if data[mid] <= k:\n left = mid + 1\n else:\n right = mid - 1\n return right\n\nprint(Solution().GetNumberOfK([1,2,3,3,3,4], 2))\n\n\n" }, { "alpha_fraction": 0.4486754834651947, "alphanum_fraction": 0.5049669146537781, "avg_line_length": 26.454545974731445, "blob_id": "d3ef9a2fbe191860f8217db79ee0a3cfeb4d7ed5", "content_id": "07d557673bce271349aa9b463e7ef19d3af5f979", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 874, "license_type": "no_license", "max_line_length": 53, "num_lines": 22, "path": "/offer/10.3矩阵覆盖.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n我们可以用2*1的小矩形横着或者竖着去覆盖更大的矩形。请问用n个2*1的小矩形无重叠地\n覆盖一个2*n的大矩形,总共有多少种方法?\n\"\"\"\nclass Solution:\n def rectCover(self, number):\n # 用第一个2*1的矩阵去覆盖大矩阵最左边的时候,有两种覆盖方式\n # 横着放:则左下角也只能再横着放一个2*1,所以还剩右边2*(n-2)的区域f(n-2)\n # 竖着放:则最左边的区域被占据,还剩右边n-1的区域,f(n-1)\n # f(n) = f(n-1) + f(n-2)\n if number <= 0:\n return 0\n res = [1, 2, -1]\n if number < 3:\n return res[number - 1]\n for i in range(3, number + 1):\n res[2] = res[0] + res[1]\n res[0] = res[1]\n res[1] = res[2]\n return res[2]\n" }, { "alpha_fraction": 0.4650406539440155, "alphanum_fraction": 0.48373982310295105, 
"avg_line_length": 25.7608699798584, "blob_id": "38f680f1dc9b8fb71a38006bf7e7b69b411ff7e4", "content_id": "1b3594f2ef99160004f013b1da78a164a22a62d0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1230, "license_type": "no_license", "max_line_length": 65, "num_lines": 46, "path": "/面试与笔试/笔试/didi/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# import sys\n# line1 = list(map(int, sys.stdin.readline().strip().split(' ')))\n# line2 = list(map(int, sys.stdin.readline().strip().split(' ')))\n# line3 = list(map(int, sys.stdin.readline().strip().split(' ')))\n#\n# n = line1[0]\n# m = line1[1]\n# d = line1[2]\n#\n# print(2)\n\nif __name__ == \"__main__\":\n from copy import copy\n\n nn, mm, D = map(int, input().strip().split())\n sps = list(map(int, input().strip().split()))\n father = list(map(int, input().strip().split()))\n\n from collections import defaultdict\n\n d = defaultdict(list)\n for index, node in enumerate(father):\n d[node].append(index + 2)\n d[index + 2].append(node)\n\n ress = [0] * (nn + 1)\n for spe in sps:\n vis = [1] + [0] * nn\n vis[spe] = 1\n depth = 0\n ress[spe] = max(ress[spe], depth)\n candis = copy(d[spe])\n while not all(vis):\n depth += 1\n new_candi = []\n for c in candis:\n if not vis[c]:\n vis[c] = 1\n ress[c] = max(ress[c], depth)\n new_candi.extend(d[c])\n candis = new_candi\n res = 0\n for num in ress[1:]:\n if num <= D:\n res += 1\n print(res)" }, { "alpha_fraction": 0.5112651586532593, "alphanum_fraction": 0.5285961627960205, "avg_line_length": 23, "blob_id": "f0c79fe6efc34f113095c08417d410ce30be74b5", "content_id": "da5b00d24942c158b92b7ac913f0999761bd44b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 577, "license_type": "no_license", "max_line_length": 103, "num_lines": 24, "path": "/面试与笔试/笔试/zhaohang/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\ndef recu(i, n, a, b, position):\n\n\ndef f(n, a, b, position):\n res = 1\n for i in range(n):\n tmp = 1\n # for j in range(i,n):\n # if abs(position[i][0] - position[j][0]) < a and abs(position[i][1] - position[j][1]) < b:\n # tmp += 1\n res = max(res, tmp)\n return res\n\nnums = list(map(int, sys.stdin.readline().strip().split(\" \")))\nn = nums[0]\na = nums[1]\nb = nums[2]\nposition = []\nfor i in range(n):\n position.append(list(map(int, sys.stdin.readline().strip().split(\" \"))))\n\nres = f(n, a, b, position)\nprint(res)\n\n" }, { "alpha_fraction": 0.5120910406112671, "alphanum_fraction": 0.5170696973800659, "avg_line_length": 22.383333206176758, "blob_id": "e93f26ea9bf1035f0c17c71185eb711dbb62903a", "content_id": "02f05c9ab72f1a038bdc3528c663ea998e7456e7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1528, "license_type": "no_license", "max_line_length": 57, "num_lines": 60, "path": "/offer/36.1二叉搜索树与双向链表.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "'''\n题目描述\n输入一棵二叉搜索树,将该二叉搜索树转换成一个排序的双向链表。要求不能创建任何新的结点,只能调整树中结点指针的指向。\n'''\n# -*- coding:utf-8 -*-\nclass TreeNode:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\n\nclass Solution:\n def Convert1(self, pRootOfTree):\n # write code here\n if not pRootOfTree:\n return None\n head = cur = TreeNode(0)\n self.ConvertTreeToList(pRootOfTree, cur)\n head = head.right\n head.left = None\n return head\n\n def ConvertTreeToList(self, root, cur):\n\n if 
root.left:\n cur = self.ConvertTreeToList(root.left, cur)\n cur.right = root\n root.left = cur\n cur = root\n if root.right:\n cur = self.ConvertTreeToList(root.right, cur)\n return cur\n\n def Convert(self, pRootOfTree):\n if not pRootOfTree:\n return None\n head = cur = TreeNode(0)\n stack = []\n p = pRootOfTree\n while p or stack:\n if p:\n stack.append(p)\n p = p.left\n else:\n p = stack.pop()\n cur.right = p\n p.left = cur\n cur = cur.right\n p = p.right\n\n head = head.right\n head.left = None\n return head\n\n\nroot = TreeNode(3)\nroot.left = TreeNode(2)\nroot.right = TreeNode(4)\nprint(Solution().Convert(root).val)\n\n\n\n" }, { "alpha_fraction": 0.6369258165359497, "alphanum_fraction": 0.6969964504241943, "avg_line_length": 25.325580596923828, "blob_id": "71dd0fca830e20ba6363b211e18db43cb3a49120", "content_id": "00e3226a6830b832f5136a1c63c1f0a7a659b6f7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2034, "license_type": "no_license", "max_line_length": 290, "num_lines": 43, "path": "/面试与笔试/得到奖金的概率.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n小明,小华是校内公认的数据算法大牛。两人组队先后参加了阿里云天池大赛多项奖金赛事,多次获奖, 小明是其中的队长。最近的一次工业数据智能竞赛中,两人又斩获季军,获得奖金1万元。\n作为算法大牛,两人竞赛奖金分配也有独特方式,由两人共同编写的一个程序来决定奖金的归属。每次获奖后,这个程序首先会随机产生若干0-1之间的实数{p_1,p_2,...,p_n}。然后从小明开始,第一轮以p_1的概率将奖金全部分配给小明,第二轮以p_2的概率将奖金全部分配给小华,这样交替地小明、小华以p_i的概率获得奖金的全部,一旦奖金被分配, 则程序终止,如果n轮之后奖金依然没发出,则从从p_1开始继续重复(这里需要注意,如果n是奇数,则第二次从p_1开始的时候,这一轮是以p_1的概率分配给小华) ;自到100轮,如果奖金还未被分配,程序终止,两人约定通过支付宝将奖金捐出去。\n\n输入:\n\n输人数据包念N+1行,\n\n第一行包含一个整数N\n接下来N行,每行一个0-1之间的实数, 从p_1到p_N\n\n输出:\n单独一行,输出一个小教,表示小明最终获得奖金的概率,结果四舍五入, 小数点后严格保留4位(这里需要注意,如果结果为0.5,则输出0.5000)\n\n\"\"\"\nimport sys\nN = int(sys.stdin.readline().strip())\np = list(map(float,sys.stdin.readline().strip().split()))\np100 = [0.0]*100\nfor i in range(100):\n p100[i] = p[i%N]\nj = 0\nsum = 0\npre = 1\nwhile j < 100:\n sum += pre * p100[j]\n pre *= (1 - p100[j]) * (1 - p100[j+1])\n j += 2\nprint('%.4f' %sum)\n\n# 法二\nimport sys\nN=int(sys.stdin.readline().strip())\nPList=list(map(float,sys.stdin.readline().strip().split()))\nPMin=0\nAllP=1\nfor i in range(100):\n j=i%N\n if i%2==0:\n PMin+=AllP*PList[j] # 整日里用的是j,不是i,所以对于新的一轮来说不会错\n AllP=AllP-AllP*PList[j]\n#print(\"%.4f\"%PMin)\nsys.stdout.write(\"%.4f\"%PMin)\n" }, { "alpha_fraction": 0.403940886259079, "alphanum_fraction": 0.43842363357543945, "avg_line_length": 22.882352828979492, "blob_id": "3542c8cb0dfdb0d3cfa4e4f9e07bff8e16301fcb", "content_id": "6e49484b839aa3a1251bf08634053e14b260464d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 406, "license_type": "no_license", "max_line_length": 54, "num_lines": 17, "path": "/面试与笔试/笔试/nvidia/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nwhile True:\n try:\n line = sys.stdin.readline().strip().split('.')\n line = line[::-1]\n res = 0\n for j, i in enumerate(line):\n res += 256 ** j * int(i)\n print(res)\n\n line = int(input())\n res = []\n for i in range(3, -1, -1):\n res.append(str(line//(256**i)%256))\n print('.'.join(res))\n except:\n break\n" }, { "alpha_fraction": 0.4009779989719391, "alphanum_fraction": 0.4413203001022339, "avg_line_length": 26.299999237060547, "blob_id": "48dacbda3cce3fc0643391416f07b40368b26cb0", "content_id": "fbd783bef344df323fae7dafbcbb0fe52fddc264", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 830, "license_type": "no_license", 
"max_line_length": 66, "num_lines": 30, "path": "/其他/test.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(sys.stdin.readline().strip())\nres = []\np = []\nfor i in range(n):\n p.append(list(map(int, sys.stdin.readline().strip().split())))\nfor i in range(n):\n if p[i][0] == p[i][2]: # 在列上\n low = min(p[i][1], p[i][3])\n high = max(p[i][1], p[i][3])\n if (low, p[i][0]) in res and (high, p[i][0]) in res:\n continue\n for j in range(low, high+1):\n if (j, p[i][0]) not in res:\n res.append((j, p[i][0]))\n else: # 在行上\n low = min(p[i][0], p[i][2])\n high = max(p[i][0], p[i][2])\n if (p[i][1], low) in res and (p[i][1], high) in res:\n continue\n for j in range(low, high + 1):\n if (p[i][1], j) not in res:\n res.append((p[i][1], j))\nprint(len(res))\n\"\"\"\n3\n1 2 3 2\n2 5 2 3\n1 4 3 4\n\"\"\"" }, { "alpha_fraction": 0.4519084095954895, "alphanum_fraction": 0.4885496199131012, "avg_line_length": 22.39285659790039, "blob_id": "aefe5627c4ee658be793dfcf51b61fcf5275ecb5", "content_id": "c20f24bb8ed0a4dbd780bbe2190ee84aa5adebe9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 655, "license_type": "no_license", "max_line_length": 40, "num_lines": 28, "path": "/面试与笔试/笔试/ks/4.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\ndef solve(eq,var='x'):\n eq1 = eq.replace(\"=\",\"-(\")+\")\"\n eq1 = eq1.replace(\"x\",\"*x\")\n eq1 = eq1.replace(\"+*x\",\"+x\")\n eq1 = eq1.replace(\"-*x\",\"-x\")\n eq1 = eq1.replace(\"(*x\",\"(x\")\n eq1 = eq1.strip(\"*\")\n # print(eq1)\n try:\n c = eval(eq1,{var:1j})\n res = -c.real/c.imag\n if int(res) == res:\n return int(res)\n # return -c.real/c.imag\n else:\n return -1\n except:\n return -1\n\nline = str(sys.stdin.readline().strip())\nline = line.replace(\"*\", \"\")\nline = line.lower()\n# print(line)\nprint(solve(line))\n# test = '10x-2x-8=x+7+4x'\n# test = '2x=6'\n# print(solve(test))\n" }, { "alpha_fraction": 0.6076642274856567, "alphanum_fraction": 0.6332116723060608, "avg_line_length": 20.038461685180664, "blob_id": "a021a085b54b7831f08f10a69e8e5ae19f8d9ac5", "content_id": "a130f307598368e1784523737830201f9f5670cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1410, "license_type": "no_license", "max_line_length": 61, "num_lines": 52, "path": "/面试与笔试/二叉树中节点的最大距离.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n求两个节点之间最远的距离:\n     (1)两个节点都是叶子结点\n     (2)一个是叶子结点一个是根节点\n思路:\n     (1)如果具有最远距离的两个节点经过了根节点,那么最远的距离就是左边最深的深度加上右边最深的深度之和。\n     (2)如果具有最远距离的两个节点之间的路径不经过根节点,那么最远的距离就在根节点的其中一个子树上的两个叶子结点。\n参考:https://blog.csdn.net/poison_biti/article/details/75798001\n\"\"\"\nclass TreeNode:\n def __init__(self, val):\n self.val = val\n self.left = None\n self.right = None\nclass Solution:\n def __init__(self):\n self.dis = 0\n\n def height(self, root):\n if not root:\n return 0\n left = self.height(root.left)\n right = self.height(root.right)\n self.dis = max(self.dis, left + right)\n return max(left, right) + 1\n\n# root = TreeNode(0)\n# left = TreeNode(0)\n# right = TreeNode(0)\n#\n# root.left = left\n# root.right = right\n# left.left = TreeNode(0)\n# left.right = TreeNode(0)\n# left.right.left = TreeNode(0)\n#\n# right.right = TreeNode(0)\n\n\nroot = TreeNode(0)\nleft = TreeNode(0)\n\nroot.left = left\nleft.left = TreeNode(0)\nleft.left.left = TreeNode(0)\nleft.right = TreeNode(0)\nleft.right.right = TreeNode(0)\n\n\ns = 
Solution()\ns.height(root)\nprint(s.dis)\n\n\n" }, { "alpha_fraction": 0.4321784973144531, "alphanum_fraction": 0.4668232500553131, "avg_line_length": 28.894737243652344, "blob_id": "af4764351d4ff1bd4ec9923ac76c09e1335dba28", "content_id": "1e2dae87c50458f750ec8f925c6317664848592c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1719, "license_type": "no_license", "max_line_length": 76, "num_lines": 57, "path": "/面试与笔试/笔试/wy/5.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# def reverse(L):\n# res = L[::-1]\n# res_copy = res.copy()\n# while res[0] == 0:\n# del res[0]\n# return res_copy, res\n#\n#\n# L0 = [int(n) for n in input()]\n# L1 = [int(n) for n in input()]\n# L2 = [int(n) for n in input()]\n# L0_copy, L0_res = reverse(L0)\n# L1_copy, L1_res = reverse(L1)\n#\n#\n# class Solution:\n# def __init__(self):\n# self.flag = \"NO\"\n# def search(self, target, index):\n# if index == len(target):\n# self.flag = \"YES\"\n# if target[index : index + len(L0) ] == L0:\n# self.search(target, index + len(L0))\n# if target[index:index+len(L1)] == L1:\n# self.search(target, index+len(L1))\n# if target[index:index+len(L0_res)] == L0_res:\n# self.search(target, index+len(L0_res))\n# if target[index:index+len(L1_res)] == L1_res:\n# self.search(target, index + len(L1_res))\n# if target[index:index+len(L0_copy)] == L0_copy:\n# self.search(target, index+len(L0_copy))\n# if target[index:index+len(L1_copy)] == L1_copy:\n# self.search(target, index + len(L1_copy))\n#\n#\n# s = Solution()\n# s.search(L2, 0)\n# print(s.flag)\n\n\n# 腾讯面试 编辑距离\ndef distance(s1, s2):\n dp = [[0] * (len(s2)+1) for i in range(len(s1)+1)]\n for i in range(len(s1) + 1):\n for j in range(len(s2) + 1):\n if i == 0:\n dp[i][j] = j\n elif j == 0:\n dp[i][j] = i\n else:\n if s1[i-1] == s2[j-1]:\n dp[i][j] = dp[i-1][j-1]\n else:\n dp[i][j] = 1 + min(dp[i-1][j-1], dp[i][j-1], dp[i-1][j])\n return dp[-1][-1]\n\nprint(distance(\"\", \"a\"))" }, { "alpha_fraction": 0.42241379618644714, "alphanum_fraction": 0.45043104887008667, "avg_line_length": 17.600000381469727, "blob_id": "c1b67de2b0b00714c6eb9b4d0f905cf3971554b4", "content_id": "395a138a31762722e5191033528726ae8fe9a16e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 514, "license_type": "no_license", "max_line_length": 51, "num_lines": 25, "path": "/面试与笔试/笔试/pingan/111.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# coding=utf-8\n# 本题为考试多行输入输出规范示例,无需提交,不计分。\n\n\nimport sys\n\nif __name__ == \"__main__\":\n\n line = sys.stdin.readline().strip()\n arr = list(map(int, line.split()))\n a = arr[0]\n b = arr[1]\n\n low = 0\n high = a\n\n while low < high:\n mid = (low + high) / 2\n if abs(pow(mid, b) - a) <= float(0.000001):\n print(\"%.6f\" % mid)\n break\n elif pow(mid, b) < a:\n low = mid\n else:\n high = mid" }, { "alpha_fraction": 0.520432710647583, "alphanum_fraction": 0.5480769276618958, "avg_line_length": 23.47058868408203, "blob_id": "df3e07e4bba33ab8c4e1c94377a2f637b601562f", "content_id": "e0ecfdafb418950dc696599ebf8c77c3b0e56a06", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 832, "license_type": "no_license", "max_line_length": 64, "num_lines": 34, "path": "/offer/9.1用两个队列实现栈.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class Queue:\n def __init__(self):\n self.queue1 = []\n self.queue2 = []\n\n def push(self, node):\n if 
(not self.queue1 and not self.queue2) or self.queue1:\n self.queue1.append(node)\n else:\n self.queue2.append(node)\n\n def pop(self):\n if self.queue1:\n while len(self.queue1) > 1:\n self.queue2.append(self.queue1.pop(0))\n return self.queue1.pop(0)\n elif self.queue2:\n while len(self.queue2) >1:\n self.queue1.append(self.queue2.pop(0))\n return self.queue2.pop(0)\n else:\n return None\n\nqueue = Queue()\nqueue.push('a')\nqueue.push('b')\nqueue.push('c')\nprint(queue.pop())\nqueue.push('d')\nprint(queue.pop())\nprint(queue.pop())\nqueue.push('e')\nprint(queue.pop())\nprint(queue.pop())\n" }, { "alpha_fraction": 0.5946745276451111, "alphanum_fraction": 0.6035503149032593, "avg_line_length": 18.882352828979492, "blob_id": "0ffaa774ec4166bf4daae369bc40626149f9051e", "content_id": "454deef794ebaca19c0283401862b2437d56c5b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 870, "license_type": "no_license", "max_line_length": 50, "num_lines": 34, "path": "/offer/22.1链表的中间节点.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n求链表的中间节点,如果链表中的节点综述为奇数,则返回中间节点,如果节点综述是偶数,则返回中间两个节点\n的任意一个\n\"\"\"\n# 思路:设计两个指针,一个每次走一步,另一个每次走两步\n# 鲁棒性:输入的链表为空则返回空;\nclass ListNode:\n def __init__(self, x):\n self.val = x\n self.next = None\n\nclass Solution:\n def find_middle(self, head):\n if not head:\n return None\n slow = fast = head\n while fast.next and fast.next.next:\n slow = slow.next\n fast = fast.next.next\n return slow\n\na = ListNode(1)\nb = ListNode(2)\nc = ListNode(3)\nd = ListNode(4)\ne = ListNode(5)\nf = ListNode(6)\na.next = b\nb.next = c\nc.next = d\nd.next = e\ne.next = f\nprint(Solution().find_middle(a).val)\nprint(Solution().find_middle(None))\n" }, { "alpha_fraction": 0.4756584167480469, "alphanum_fraction": 0.5131683945655823, "avg_line_length": 25.680850982666016, "blob_id": "c32991d9a1d80e3aeb418f411268f09e7ee4c685", "content_id": "6ca2dac83c9967a4daf900d6d4418f5ba4475447", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1425, "license_type": "no_license", "max_line_length": 45, "num_lines": 47, "path": "/offer/16.数值的整数次方.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\nclass Solution:\n def Power1(self, base, exponent):\n # 错误的法一:自以为简单的方法\n # 没有考虑指数为负数,没有考虑底数为0\n res = 1\n for i in range(exponent):\n res *= base\n return res\n\n invalid_input = False\n\n def Power(self, base, exponent):\n # 法二:全面但是不高效的方法\n res = 1\n if base == 0 and exponent <= 0:\n invalid_input = True\n return 0\n for i in range(abs(exponent)):\n res *= base\n if exponent < 0:\n res = 1/res\n return res\n def Power2(self, base, exponent):\n # 这种方法在python里面也出错,当exponent为负数的时候\n # 一个数的32次方等于16次方的平方\n # 当n为偶数时: a^n = a^(n/2) * a^(n/2)\n # 当n为奇数时:a^n = a^(n/2) * a^(n/2) * a\n if exponent == 0:\n return 1\n if exponent == 1:\n return base\n res = self.Power(base, exponent >> 1)\n res *= res\n if exponent & 1 == 1:\n res *= base\n return res\n\nprint(-2 >> 10)\nprint(Solution().Power(2.0, 3))\nprint(Solution().Power(-2.0, 3))\nprint(Solution().Power(2.0, -2))\nprint(Solution().Power(0, -3))\nprint(Solution().Power(0, 3))\nprint(Solution().Power(-2, -3))\nprint(Solution().Power(-3, 0))\nprint(Solution().Power(0, 0))" }, { "alpha_fraction": 0.48051947355270386, "alphanum_fraction": 0.501298725605011, "avg_line_length": 29.81999969482422, "blob_id": "dffc2f177e3a5cb8c10db872a7ae8f1091b0d4a4", "content_id": 
"1ce7bd9e65ddf4b8d5de885546246680be0d84ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1922, "license_type": "no_license", "max_line_length": 81, "num_lines": 50, "path": "/offer/21.调整数组顺序使奇数位于偶数前面.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n# 法一:从前往后扫描,每遇到一个偶数,则把其后面的数都往前移一位,然后把这个偶数放到最后,复制度为O(n^2)\n\n# 可以把判断是否是奇数的部分提出来写一个单独的函数,这样可扩展\n\nclass Solution:\n def reOrderArray1(self, array):\n # 法二: 这种方法不能保证奇数和奇数,偶数和偶数之间的相对位置不变\n # 设置两个指针,一个指向头部,一个指向尾部,然后向内移动,\n # 分别找到一个奇数一个偶数,然后调换位置\n if array is None:\n return\n low = 0\n high = len(array)-1\n while low < high:\n while low < high and array[low] % 2 != 0:\n low += 1\n while low < high and array[high] % 2 == 0:\n high -= 1\n if low < high:\n array[low], array[high] = array[high], array[low]\n return array\n def reOrderArray(self, array):\n # 更简单的方法,但是占内存\n odd = [item for item in array if item % 2 == 1 ]\n even = [item for item in array if item % 2 == 0]\n return odd + even\n\n def reOrderArray2(self, array):\n # 为了保证相对位置不变\n if not array:\n return array\n low = 0\n while low < len(array):\n while low < len(array) and array[low] % 2 != 0:\n low += 1\n high = low + 1\n while high < len(array) and array[high] % 2 == 0:\n high += 1\n if high < len(array):\n # array[low], array[high] = array[high], array[low] 这里不能用交换,结果不对\n array.insert(low, array.pop(high))\n low += 1\n else:\n break\n return array\n\narray = [1,2,3,4,5,6,7]\nSolution().reOrderArray(array)\nprint(array)" }, { "alpha_fraction": 0.5397923588752747, "alphanum_fraction": 0.5732410550117493, "avg_line_length": 23.08333396911621, "blob_id": "800bddcdc2f5334da8d0b647b4c275f1a103ad6d", "content_id": "c562658a9e906bd330e2e800642da515faed236d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1085, "license_type": "no_license", "max_line_length": 81, "num_lines": 36, "path": "/offer/7.重建二叉树.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n题目描述\n输入某二叉树的前序遍历和中序遍历的结果,请重建出该二叉树。假设输入的前序遍历和中序遍历的\n结果中都不含重复的数字。例如输入前序遍历序列{1,2,4,7,3,5,6,8}和中序遍历序列{4,7,2,1,5,3,8,6},\n则重建二叉树并返回。\n\"\"\"\n\n\n# -*- coding:utf-8 -*-\nclass TreeNode:\n def __init__(self, x):\n self.val = x\n self.left = None\n self.right = None\n\nclass Solution:\n # 返回构造的TreeNode根节点\n def reConstructBinaryTree(self, pre, tin):\n # write code here\n if not pre or not tin or len(pre) == 0 or len(tin) == 0:\n return None\n root = TreeNode(pre[0])\n try: # 考虑前序和中序不匹配的情况\n index = tin.index(pre[0])\n root.left = self.reConstructBinaryTree(pre[1:1+index], tin[:index])\n root.right = self.reConstructBinaryTree(pre[1+index:], tin[index+1:])\n except:\n return None\n return root\n\na = [1,2]\ntry:\n i = a.index(3)\n print(i)\nexcept:\n print(-1)\n" }, { "alpha_fraction": 0.4705532491207123, "alphanum_fraction": 0.5336109399795532, "avg_line_length": 27.03333282470703, "blob_id": "f076be234f825b0e19a55be8f94898ea638533df", "content_id": "0f54330d6e6e9f9ed1e9473d68bb5635dd5d2117", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1823, "license_type": "no_license", "max_line_length": 126, "num_lines": 60, "path": "/面试与笔试/笔试/蘑菇街/111.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# # coding=utf-8\n# for i in range(1):\n# filename = \"{0:03d}.txt\".format(i) # 这样子就可以输出文件名是001~009,然后\n# print filename\n# f = open(filename, 'r')\n# lines = f.readlines() # lines保存文件中所有的行\n# 
for line in lines:\n# numbers = list(map(float, line.strip().split(','))) # 比如说一行有8个数字,以空格分隔,这里会把这8个数字转换成int型放在数组number中[1,2,3,4,5,6,7,8]\n# print(numbers[1])\n#\n#\n# #coding=utf-8\n# import cv\n#\n# f = open(\"000.txt\",'r')\n#\n# lines = f.readlines()\n#\n# for line in lines:\n# numbers = list(map(float,line.strip().split(',')))\n# print(numbers)\n# flame = numbers[0]\n# oid = numbers[1]\n# sx1 = numbers[2]\n# sx2=numbers[2]+numbers[4]\n# sy1=numbers[3]\n# sy2=numbers[3]+numbers[5]\n# name = \"000001\"\n# im = cv.imread(\"data/000004.png\") # 读取指定路径图片\n# cv2.rectangle(im, (int(sx1), int(sy1)), (int(sx2), int(sy2)), (0, 255, 0), 3)\n# if (sy1 > 10):\n# cv2.putText(im, oid, (int(sx1), int(sy1 - 6)), cv2.FONT_HERSHEY_COMPLEX_SMALL, 0.8, (0, 255, 0))\n# else:\n# cv2.putText(im, oid, (int(sx1), int(sy1 + 15)), cv2.FONT_HERSHEY_COMPLEX_SMALL, 0.8, (0, 255, 0))\n# # cv2.imshow('MultiTracker', frame)#也可以存储\n# cv2.imwrite(\"image/000000.png\", gray)\n# break\n\n\n\n#coding=utf-8\nimport sys\n\ndef max_seq(nums):\n dp = [float(\"-inf\")] * len(nums)\n for i in range(len(nums)):\n if i == 0:\n dp[i] = nums[i]\n else:\n dp[i] = dp[i-1] + nums[i]\n max_len = 0\n dic = {}\n for j in range(len(dp)):\n if dp[j] in dic.keys():\n max_len = max(max_len, j - dic[dp[j]])\n else:\n dic[dp[j]] = j\n return max_len\n\nprint max_seq([0,-1,1,3,-3])" }, { "alpha_fraction": 0.39741936326026917, "alphanum_fraction": 0.41806450486183167, "avg_line_length": 18.897436141967773, "blob_id": "bf6e623fe7e09dbc535070d47e7a2739b9294aa0", "content_id": "2e0673f0703cbd3e3f5adb2fdedcfe6b330a193c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 775, "license_type": "no_license", "max_line_length": 50, "num_lines": 39, "path": "/面试与笔试/笔试/wyhy/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "n = int(input())\nfor i in range(n):\n s = input().strip()\n index = []\n for j in range(len(s)):\n if s[j] != \"N\":\n index.append(j)\n if len(index) < 3:\n print(len(s))\n else:\n if index[0] != 0:\n index = [0] + index\n if index[-1] != len(s) - 1:\n index = index + [len(s) - 1]\n\n print(index)\n diff = []\n k = 1\n while k < len(index):\n diff.append(index[k] - index[k-1])\n k += 1\n print(diff)\n max_len = 0\n i = 0\n j = 3\n\n while j <= len(diff):\n max_len = max(max_len, sum(diff[i:j]))\n i += 1\n j += 1\n print(max_len)\n\n'''\n4\nNNTN\nNNNNGGNNNN\nNGNNNNGNNNNNNNNSNNNN\nNGNNNNGNNNNNNNNSNNNNS\n'''" }, { "alpha_fraction": 0.5231277346611023, "alphanum_fraction": 0.5253304243087769, "avg_line_length": 20.0930233001709, "blob_id": "9c589ea98bb49eb8868c97afd9c6d044c1d88226", "content_id": "4c9cd1229517c79d9e49212ef4827ef09bc78be9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1038, "license_type": "no_license", "max_line_length": 52, "num_lines": 43, "path": "/offer/24.反转链表.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n输入一个链表,反转链表后,输出新链表的表头。\n\"\"\"\nclass ListNode:\n def __init__(self, x):\n self.val = x\n self.next = None\n\n\"\"\"\n思路:有循环和递归两种实现方式\n鲁棒性:输入为空;输入只包含一个节点\n\"\"\"\nclass Solution:\n # 返回ListNode\n def ReverseList1(self, pHead):\n # 循环\n if not pHead or not pHead.next:\n return pHead\n pre = None\n cur = pHead\n while cur:\n next = cur.next\n cur.next = pre\n pre = cur\n cur = next\n return pre\n\n def ReverseList(self, pHead):\n # 递归\n if not pHead or not pHead.next:\n return pHead\n pre = None\n return 
self.ReverseListRecursion(pre, pHead)\n\n def ReverseListRecursion(self, pre, cur):\n if not cur:\n return pre\n node = cur.next\n cur.next = pre\n pre = cur\n return self.ReverseListRecursion(pre, node)\n\n" }, { "alpha_fraction": 0.43112701177597046, "alphanum_fraction": 0.43828263878822327, "avg_line_length": 19.740739822387695, "blob_id": "8463b90e06face623841b2fac498baee51000049", "content_id": "5a249ab953278c5014c24cf63c881ef428a61031", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 559, "license_type": "no_license", "max_line_length": 44, "num_lines": 27, "path": "/面试与笔试/笔试/wy/3.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class Solution:\n def odd(self, n):\n for i in n:\n if i % 2 == 0:\n return False\n return True\n\n def even(self, n):\n for i in n:\n if i % 2 != 0:\n return False\n return True\n def smallest(self, n):\n if self.even(n) or self.odd(n):\n return n\n n.sort()\n return n\n\nif __name__=='__main__':\n s = Solution()\n n = input()\n num = list(map(int, input().split(' ')))\n\n\n res = s.smallest(num)\n for each in res:\n print(each, end=' ')" }, { "alpha_fraction": 0.5229110717773438, "alphanum_fraction": 0.5309972763061523, "avg_line_length": 22.25, "blob_id": "36662df10f5cc57fd71b92bf943fd7712b06e691", "content_id": "c5b440d3ecdca0e1bdae1cd31bdce2468805a691", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 389, "license_type": "no_license", "max_line_length": 62, "num_lines": 16, "path": "/面试与笔试/笔试/xm/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n最长连续子序列的和\n\"\"\"\nimport sys\nclass Solution(object):\n def max_sub_array(self, nums):\n if not nums:\n return 0\n res = tmp = nums[0]\n for n in nums[1:]:\n tmp = max(tmp + n, n)\n res = max(res, tmp)\n return res\n\nnums = list(map(int, sys.stdin.readline().strip().split(\" \")))\nprint(Solution().max_sub_array(nums))" }, { "alpha_fraction": 0.519911527633667, "alphanum_fraction": 0.5383480787277222, "avg_line_length": 32.92499923706055, "blob_id": "6c25aab6612b464336352f4d06c44fa991d19e42", "content_id": "3ee3ce5aa25f16a6944e6c94b5a47b628839372a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1730, "license_type": "no_license", "max_line_length": 98, "num_lines": 40, "path": "/offer/19.正则表达式匹配.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n请实现一个函数用来匹配包括'.'和'*'的正则表达式。模式中的字符'.'表示任意一个字符,\n而'*'表示它前面的字符可以出现任意次(包含0次)。 在本题中,匹配是指字符串的所有\n字符匹配整个模式。例如,字符串\"aaa\"与模式\"a.a\"和\"ab*ac*a\"匹配,但是与\"aa.a\"和\"ab*a\"均\n不匹配\n\"\"\"\nclass Solution:\n # s, pattern都是字符串\n def match(self, s, pattern):\n # write code here\n if s is None or pattern is None:\n return False\n return self.match_core(s, pattern)\n\n def match_core(self, s, pattern):\n # 1. 两者都到尾部,返回True\n if not len(s) and not len(pattern):\n return True\n # 2. 字符串没到尾部,pattern到尾部\n if len(s) and not len(pattern):\n return False\n # 3. 当pattern的下一个字符为*时\n if len(pattern) > 1 and pattern[1] == '*':\n # 4. 如果当前字符匹配或者(当前pattern为.且s没到结尾)\n if len(s) > 0 and (s[0] == pattern[0] or pattern[0] == '.'):\n return self.match_core(s[1:], pattern[2:]) or self.match_core(s[1:], pattern) or \\\n self.match_core(s, pattern[2:])\n else:\n # 5. 当前字符不匹配 当前pattern为.且s到结尾 当前字符不为.\n return self.match_core(s, pattern[2:])\n # 6. 
下一个pattern不为*时\n if len(s) > 0 and (s[0] == pattern[0] or pattern[0]=='.'):\n return self.match_core(s[1:], pattern[1:])\n return False\n\n\n# print(Solution().match(\"aaa\", \"a.a\"))\nprint(Solution().match(\"\", \".\"))" }, { "alpha_fraction": 0.5151515007019043, "alphanum_fraction": 0.5562770366668701, "avg_line_length": 24, "blob_id": "37f2a21c9da41bb4aa24fe5f50506cfd37829b6f", "content_id": "073586173560599e74af0b3defe2c28d7ed4777c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1214, "license_type": "no_license", "max_line_length": 69, "num_lines": 37, "path": "/offer/61.扑克牌中的顺子.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n从一副扑克牌中抽5张,判断这五张是否是连续的,2~10为数字本身,A为1,J为11,Q为12,K为13,大小王为可以变成\n\"\"\"\n\n'''\n思路:输入的就是数字,0表示的是大小王,首先先对数组排一个序,然后统计0的个数,然后在遍历数组统计gap的个数,\n如果前一个元素和后一个元素相等,那么直接返回false,如果不想等则计算gap值,最后比较0的个数是不是大于等于gap值。\n'''\nclass Solution:\n def IsContinuous(self, numbers):\n # write code here\n if not numbers:\n return False\n numbers.sort()\n\n count_0 = 0\n i = 0\n while i < len(numbers) and numbers[i] == 0:\n count_0 += 1\n i += 1\n\n small = count_0\n big = small + 1\n count_gap = 0\n while big < len(numbers):\n if numbers[small] == numbers[big]:\n return False\n count_gap = count_gap + numbers[big] - numbers[small] - 1\n small = big\n big += 1\n\n return count_0 >= count_gap\n\n\nprint(Solution().IsContinuous([1,3,2,5,4]))\nprint(Solution().IsContinuous([3,5,0,6,0]))" }, { "alpha_fraction": 0.4696570038795471, "alphanum_fraction": 0.5065963268280029, "avg_line_length": 28.230770111083984, "blob_id": "e1fffad00d6bfcf4376ebc2505a4290cfa38aa4b", "content_id": "67b10da5dc4b556a38fe5ed74b6e00220ecbd9c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 535, "license_type": "no_license", "max_line_length": 63, "num_lines": 13, "path": "/面试与笔试/背包问题-多重背包问题.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n多重背包问题,给定数量的物体,第一个是1个物体,第二个是无限物体,这个是介于之间,给定物体数目\n方法1:再多加个循环试一下k个物体的价值, 这次直接上优化完的一维数组的\n\n\"\"\"\ndef pack3(w, v, s, c):\n dp = [0 for _ in range(c+1)]\n for i in range(1, len(w)+1):\n for j in reversed(range(1, c+1)):\n for k in range(s[i-1] + 1):\n if k*w[i-1] <= j:\n dp[j] = max(dp[j], dp[j-k*w[i-1]]+k*v[i-1])\n return dp[c]" }, { "alpha_fraction": 0.5015576481819153, "alphanum_fraction": 0.5264797210693359, "avg_line_length": 23.69230842590332, "blob_id": "6f3b8fa41a4cb436b78d8f9556dde3ddcd097da8", "content_id": "780826e64cbc74ac0b94a834770dca73f1211432", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 822, "license_type": "no_license", "max_line_length": 64, "num_lines": 26, "path": "/offer/57.和为s的两个数.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "'''\n题目描述\n输入一个递增排序的数组和一个数字S,在数组中查找两个数,使得他们的和正好是S,如果有多对数字的和等于S,输出两个数的乘积最小的。\n输出描述:\n对应每个测试案例,输出两个数,小的先输出。\n'''\n\n# -*- coding:utf-8 -*-\nclass Solution:\n def FindNumbersWithSum(self, array, tsum):\n # write code here\n if not array or len(array) < 2:\n return []\n left, right = 0, len(array) - 1\n while left < right:\n s = array[left] + array[right]\n if s == tsum:\n return array[left], array[right]\n elif s > tsum:\n right -= 1\n else:\n left += 1\n\n return []\n\nprint(Solution().FindNumbersWithSum([1,2,4,7,11,16],10))\n" }, { "alpha_fraction": 0.5855855941772461, "alphanum_fraction": 0.5990990996360779, "avg_line_length": 21.233333587646484, 
"blob_id": "494b2a4f314d0c829b18eaf2775ba28a08a475a6", "content_id": "91020822f5b1348448c00eba7fe5db560d34d1ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 666, "license_type": "no_license", "max_line_length": 98, "num_lines": 30, "path": "/offer/54.二叉搜索树的第k大节点-230.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "class TreeNode:\n def __init__(self, x):\n self.left = None\n self.right = None\n self.val = x\n\nclass Solution:\n def inorder_traversal(self, root):\n if not root:\n return []\n return self.inorder_traversal(root.left) + [root.val] + self.inorder_traversal(root.right)\n\n def KthNode(self, pRoot, k):\n if not pRoot:\n return None\n nums = self.inorder_traversal(pRoot)\n return nums[k-1]\n\n\nroot = TreeNode(5)\na = TreeNode(3)\nb = TreeNode(7)\nroot.left = a\nroot.right = b\na.left = TreeNode(2)\na.right = TreeNode(4)\nb.left = TreeNode(6)\nb.right = TreeNode(8)\n\nprint(Solution().KthNode(root, 3))" }, { "alpha_fraction": 0.6393442749977112, "alphanum_fraction": 0.7704917788505554, "avg_line_length": 58, "blob_id": "c3254d82bd89a52f64daef734831321a3970ae9b", "content_id": "ade47a1ba42bf68cca735f9168bc3b4a36113363", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 61, "license_type": "no_license", "max_line_length": 58, "num_lines": 1, "path": "/面试与笔试/链表的快排.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# https://blog.csdn.net/otuhacker/article/details/10366563\n\n\n" }, { "alpha_fraction": 0.5249344110488892, "alphanum_fraction": 0.5380577445030212, "avg_line_length": 24.46666717529297, "blob_id": "b135317e3061a84e3e0ff42a6f4150cfae20b6be", "content_id": "3aba7bc791ac668e0883c492844ccdec03b75f43", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 381, "license_type": "no_license", "max_line_length": 46, "num_lines": 15, "path": "/面试与笔试/笔试/pingan/33.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\ndef max_deliver(child, n):\n avg = sum(child) / n\n ans = 0\n for i in range(len(child)):\n if abs(child[i] - avg) % 2 != 0:\n return -1\n elif child[i] < avg:\n ans += int((avg - child[i])/2)\n return ans\n\nn = int(input())\nline = sys.stdin.readline().strip().split(\" \")\nchild = list(map(float, line))\nprint(max_deliver(child, n))" }, { "alpha_fraction": 0.47767481207847595, "alphanum_fraction": 0.5122156739234924, "avg_line_length": 25.399999618530273, "blob_id": "0d0809b284bd2b154f63055ce716cd84afd74f30", "content_id": "eb9facc2e5ccdb974d761aeb9b3607379687589c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1187, "license_type": "no_license", "max_line_length": 136, "num_lines": 45, "path": "/最长公共(子)/最大子序列和.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\nGiven an integer array nums, find the contiguous subarray (containing at least one number) which has the largest sum and return its sum.\n\nExample:\n\nInput: [-2,1,-3,4,-1,2,1,-5,4],\nOutput: 6\nExplanation: [4,-1,2,1] has the largest sum = 6.\n\"\"\"\n\"\"\"\ndp:\ndp[i]=nums[i] if dp[i-1]<0 or i = 0\n nums[i] + dp[i-1] if i!= 0 and dp[i-1] > 0\n\"\"\"\nclass Solution(object):\n def maxSubArray1(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: int\n \"\"\"\n max_sum = float('-inf')\n now_sum = 0\n for i in range(len(nums)):\n now_sum += nums[i]\n if now_sum > 
max_sum:\n max_sum = now_sum\n if now_sum < 0:\n now_sum = 0\n return max_sum\n def maxSubArray(self, nums):\n \"\"\"\n :type nums: List[int]\n :rtype: int\n \"\"\"\n if not nums:\n return 0\n maxSum = current = nums[0]\n for n in nums[1:]:\n current = max(current + n, n)\n maxSum = max(maxSum, current)\n return maxSum\n\nprint(Solution().maxSubArray([-1,2,3,4,5,6,-5,4,-7]))\nprint(Solution().maxSubArray([-1]))\nprint(Solution().maxSubArray([-2,1]))" }, { "alpha_fraction": 0.5145067572593689, "alphanum_fraction": 0.5183752179145813, "avg_line_length": 27.72222137451172, "blob_id": "d6de0f4f81d6ac3c5294d93fa83ec48f9c6cfcfc", "content_id": "21a26095e95ad64b22ed78bf8c70fc0a61e9d61e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1394, "license_type": "no_license", "max_line_length": 56, "num_lines": 36, "path": "/offer/8.二叉树的下一个结点.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "\"\"\"\n题目描述\n给定一个二叉树和其中的一个结点,请找出中序遍历顺序的下一个结点并且返回。\n注意,树中的结点不仅包含左右子结点,同时包含指向父结点的指针。\n\"\"\"\n# -*- coding:utf-8 -*-\n# class TreeLinkNode:\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n# self.next = None\nclass Solution:\n def GetNext(self, pNode):\n # 分三种情况讨论:\n # 1. 若该节点有右节点,则返回右节点最下面的左节点\n # 2. 若该节点没有右节点,且其为其父节点的左节点,则返回其父节点\n # 3. 若该节点为其父节点的右节点,则沿着指向父节点的指针一直往上找,直到\n # 找到一个是它父节点的左子节点的节点\n if not pNode:\n return None\n if pNode.right:\n pNode = pNode.right\n while pNode.left:\n pNode = pNode.left\n return pNode\n elif pNode.next and pNode.next.left == pNode:\n return pNode.next\n else:\n while pNode.next:\n if not pNode.next.next:\n return None\n elif pNode.next.next.left == pNode.next:\n return pNode.next.next\n else:\n pNode = pNode.next\n" }, { "alpha_fraction": 0.4833555221557617, "alphanum_fraction": 0.4993342161178589, "avg_line_length": 23.25806427001953, "blob_id": "86921378cffe6784dd52020ae607028f3d0e70dc", "content_id": "c7638a4522cb54dd6764ec1a60070b5fa28ad4a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 873, "license_type": "no_license", "max_line_length": 65, "num_lines": 31, "path": "/offer/18.1删除链表中重复的结点.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\"\"\"\n题目描述\n在一个排序的链表中,存在重复的结点,请删除该链表中重复的结点,重复的结点不保留,返回链表头指针。\n例如,链表1->2->3->3->4->4->5 处理后为 1->2->5\n\"\"\"\n\n\nclass ListNode:\n def __init__(self, x):\n self.val = x\n self.next = None\n\n\nclass Solution:\n def deleteDuplication(self, pHead):\n #\n if not pHead:\n return None\n p = pre = ListNode(0)\n pre.next = pHead\n while pHead and pHead.next:\n if pHead.next.val == pHead.val:\n while pHead.next and pHead.next.val == pHead.val:\n pHead.next = pHead.next.next\n pHead = pHead.next\n pre.next = pHead\n else:\n pre = pre.next\n pHead = pHead.next\n return p.next" }, { "alpha_fraction": 0.4537608027458191, "alphanum_fraction": 0.49938347935676575, "avg_line_length": 18.33333396911621, "blob_id": "27ac180e99573b2985749fabe4acffc0b11372b8", "content_id": "88c6cbf4de9c87971e732f85657bd64af7e94bf6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 811, "license_type": "no_license", "max_line_length": 61, "num_lines": 42, "path": "/面试与笔试/笔试/hulu/2.py", "repo_name": "xiaomojie/NowCoder", "src_encoding": "UTF-8", "text": "import sys\nn = int(input().strip())\narr = list(map(int, sys.stdin.readline().strip().split(' ')))\n\nres = 0\n# for i in range(n):\n# cur_max = 
arr[i]\n# for j in range(i, n):\n# cur_max = max(cur_max, arr[j])\n# res += cur_max\n# # res += max(arr[i:j+1])\n# print(res%1000000007)\n\ndp = [0]*n\nfor i in range(n):\n # if i == 0:\n # dp[i] = arr[i]\n # else:\n # dp[i] =\n dp[i] = sum\n\n res += dp[i] * (i+1)\n\n# for i in range(n):\n# res += sum(dp[i:])\nprint(res%1000000007)\n\n\n\n\nimport sys\nn = int(input().strip())\narr = list(map(int, sys.stdin.readline().strip().split(' ')))\n\nres = 0\nfor i in range(n):\n cur_max = arr[i]\n for j in range(i, n):\n cur_max = max(cur_max, arr[j])\n res += cur_max\n # res += max(arr[i:j+1])\nprint(res%1000000007)" } ]
197
amitsood/autoencoder_visualization
https://github.com/amitsood/autoencoder_visualization
efe5dfacfca33bfbeb745329b2ce390afc16d4a0
3e3b4bd78b80a58778def23620de6099417f2f5f
94deeb0afbf730f36eac5e92c1950d58e973435e
refs/heads/master
2020-05-30T20:02:43.949090
2019-06-03T09:31:37
2019-06-03T09:31:37
189,939,654
0
0
null
2019-06-03T05:24:26
2019-05-13T03:24:03
2019-05-12T21:58:16
null
[ { "alpha_fraction": 0.6219512224197388, "alphanum_fraction": 0.6722561120986938, "avg_line_length": 16.263158798217773, "blob_id": "4e405c29a98a9f56257dc07ac54376ffba7c1364", "content_id": "b4c0d5e0be351eac2715d364baff49f993d2bd69", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 656, "license_type": "permissive", "max_line_length": 51, "num_lines": 38, "path": "/nn_viz_00.py", "repo_name": "amitsood/autoencoder_visualization", "src_encoding": "UTF-8", "text": "\"\"\"\nGenerate a autoencoder neural network visualization\n\"\"\"\nFIGURE_WIDTH=16\nFIGURE_HEIGHT=9\nRIGHT_BORDER0=0.7\nLEFT_BORDER=0.7\nTOP_BORDER=0.8\nBOTTOM_BORDER=0.6\n\nN_IMAGE_PIXEL_COLS=64\nN_IMAGE_PIXEL_ROWS=48\nN_NODES_BY_LAYER=[10,7,5,8]\n\nINPUT_IMAGE_BOTTOM=5\nINPUT_IMAGE_HEIGHT= 0.25* FIGURE_HEIGHT\nERROR_IMAGE_SCALE=0.7\nERROR_GAP_SCALE=0.3\nBETWEEN_LAYER_SCALE=0.8\nBETWEEN_NODE_CALE=0.4\n\ndef main():\n p = construct_parameters()\n \n\ndef contruct\ndef construct_parameters():\n parameters={}\n \n parameters['figure']={\n \"height\"=FIGURE_HEIGHT,\n \"width\"=FIGURE_WIDTH\n }\n return parameters\n \n\nif __name__ == \"__main__\":\n main()\n" } ]
1
B-Racich/MySummerTunes
https://github.com/B-Racich/MySummerTunes
ce5ac1f96fc71304b21ff1e6b89105cf678033c8
166c3a22ceba482abb9409b2bdb818fce44f89e5
cc9f2533af98767ad8599ab49a79730bad33f13a
refs/heads/master
2021-06-20T20:11:04.355452
2021-06-08T03:58:54
2021-06-08T03:58:54
209,903,779
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.8333333134651184, "alphanum_fraction": 0.8333333134651184, "avg_line_length": 23, "blob_id": "8971dcffcea7a3a1aa6c6aa17dff3512f1bc443d", "content_id": "5fd232be67b4145d6fa84f8967278e3a5e700453", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 48, "license_type": "no_license", "max_line_length": 31, "num_lines": 2, "path": "/README.md", "repo_name": "B-Racich/MySummerTunes", "src_encoding": "UTF-8", "text": "# MySummerTunes\n MSC music downloader/converter\n" }, { "alpha_fraction": 0.5245863199234009, "alphanum_fraction": 0.5262176394462585, "avg_line_length": 32.0076904296875, "blob_id": "8f95b3d67d8594df3f8513ada6f96ae936e939a6", "content_id": "be602b380956dc834d431687cc9a2ba1fb707662", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4291, "license_type": "no_license", "max_line_length": 93, "num_lines": 130, "path": "/MySummerTunes/venv/FileSystem.py", "repo_name": "B-Racich/MySummerTunes", "src_encoding": "UTF-8", "text": "from tkinter.filedialog import askdirectory\nimport os\nfrom Media import Media\nimport html\n\n\nclass FileSystem:\n\n def __init__(self, gui):\n self.gui = gui\n self.appdata_path = os.getenv('LOCALAPPDATA')\n self.data_path = self.appdata_path + '\\\\MySummerTunes'\n\n if os.path.exists(self.data_path):\n self.log_path = self.data_path + '\\\\Tunes Log.txt'\n self.settings_path = self.data_path + '\\\\Settings.txt'\n else:\n os.mkdir(self.data_path)\n self.log_path = self.data_path + '\\\\Tunes Log.txt'\n self.settings_path = self.data_path + '\\\\Settings.txt'\n\n \"\"\"\n Sets the download path\n \"\"\"\n def set_download_path(self):\n download_path = askdirectory()\n has_replaced = False\n with open(self.settings_path, \"r\") as f:\n lines = f.readlines()\n with open(self.settings_path, \"w\") as f:\n for line in lines:\n if \"downloads = \" not in line:\n f.write(line)\n elif \"downloads = \" in line:\n f.write(\"downloads = \"+download_path+'\\n')\n has_replaced = True\n if not has_replaced:\n file = open(self.settings_path, 'a')\n file.write('downloads = ' + download_path)\n file.close()\n\n \"\"\"\n Returns the set download path or false if not set\n \"\"\"\n def get_download_path(self):\n settings_file = open(self.settings_path, 'r')\n for line in settings_file:\n if 'downloads = ' in line:\n settings_file.close()\n return line.split('=')[1].strip()\n settings_file.close()\n return False\n\n \"\"\"\n Checks if the log contains the track, returns boolean\n \"\"\"\n def log_contains(self, item):\n log_file = open(self.log_path, 'r', encoding='utf-8')\n for line in log_file:\n if item in line:\n log_file.close()\n return True\n log_file.close()\n return False\n\n \"\"\"\n Adds the track to log\n \"\"\"\n def log_add(self, track):\n track_number = self.find_next_track_num()\n with open(self.log_path, \"a\", encoding='utf-8') as f:\n line = str(track) + '\\t:ENCODED TRACK NAME:\\t' + 'track'+str(track_number) + '\\n'\n f.write(line)\n self.rename_track(track, track_number)\n\n \"\"\"\n Removes the track from log\n \"\"\"\n def log_remove(self, item):\n with open(self.log_path, \"r\") as f:\n lines = f.readlines()\n with open(self.log_path, \"w\") as f:\n for line in lines:\n if item not in line:\n f.write(line)\n\n \"\"\"Finds the next track number\"\"\"\n def find_next_track_num(self):\n current = 1\n with open(self.log_path, \"r\", encoding='utf-8') as f:\n lines = f.readlines()\n for line in lines:\n delim = 
\"\\t:ENCODED TRACK NAME:\\t\"\n if delim in line:\n track_num = line.split(\"\\t:ENCODED TRACK NAME:\\t\")[1]\n track_num = track_num.strip('track')\n if int(track_num) == current:\n current = current+1\n return current\n\n \"\"\"Rename track\"\"\"\n def rename_track(self, track, track_num):\n if \"/\" in track:\n track = track.replace(\"/\", \"_\")\n elif \"\\\"\" in track:\n track = track.replace(\"\\\"\", \"'\")\n src = self.get_download_path()+'/'+track+'.ogg'\n dst = self.get_download_path()+'/track'+str(track_num)+'.ogg'\n os.rename(src, dst)\n\n \"\"\"\n Downloads the youtube video audio via URL\n \"\"\"\n def youtube_download(self):\n # Get video\n url = self.gui.getUrl()\n media = Media(url)\n title = html.unescape(media.title)\n self.gui.video_title_text.set(title)\n\n # Check if downloaded\n if not self.log_contains(title):\n self.gui.status_label_text.set(\"Downloading song...\")\n path = self.get_download_path() + '/%(title)s.%(ext)s'\n media.download(path)\n self.log_add(title)\n self.gui.status_label_text.set(\"Added to log\")\n\n else:\n self.gui.status_label_text.set(\"Song already downloaded!\")\n" }, { "alpha_fraction": 0.48300284147262573, "alphanum_fraction": 0.4886685609817505, "avg_line_length": 24.214284896850586, "blob_id": "c08a9f8a48e63c30c9885f258bd3c0f627a77ede", "content_id": "309dbc4df1eec8ae04092d0038007c10dbb371c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 706, "license_type": "no_license", "max_line_length": 51, "num_lines": 28, "path": "/MySummerTunes/venv/Media.py", "repo_name": "B-Racich/MySummerTunes", "src_encoding": "UTF-8", "text": "import youtube_dl\nfrom pytube import YouTube\n\n\nclass Media:\n\n def __init__(self, url):\n # if str(url).find('&'):\n # url = str(url).split('&')[0]\n self.url = url\n yt = YouTube(url)\n self.title = yt.title\n\n def download(self, path):\n ydl_opts = {\n 'format': 'bestaudio/best',\n 'postprocessors': [{\n 'key': 'FFmpegExtractAudio',\n 'preferredcodec': 'vorbis',\n 'preferredquality': '192',\n }],\n 'outtmpl': path\n }\n with youtube_dl.YoutubeDL(ydl_opts) as ydl:\n ydl.download([self.url])\n\n def toString(self):\n print(self.url, self.title)\n" }, { "alpha_fraction": 0.6153517961502075, "alphanum_fraction": 0.6285714507102966, "avg_line_length": 30.689189910888672, "blob_id": "dd55a8ccf6be6b829862bca3b9d7b0e82e007260", "content_id": "a5a8cb2624dd836ec26ad435cf8daa605316e04b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2345, "license_type": "no_license", "max_line_length": 123, "num_lines": 74, "path": "/MySummerTunes/venv/GUI.py", "repo_name": "B-Racich/MySummerTunes", "src_encoding": "UTF-8", "text": "from __future__ import unicode_literals\nfrom tkinter import *\nfrom FileSystem import FileSystem\n\n\nclass GUI:\n\n def __init__(self, master):\n self.master = master\n self.file_system = FileSystem(self)\n master.title(\"My Summer Tunes\")\n self.status_label_text = StringVar()\n self.video_title_text = StringVar()\n self.location_label_text = StringVar()\n\n #Left panel\n left_panel = Frame(master)\n left_panel.grid(row=0, column=0)\n\n \"\"\"URL\"\"\"\n self.urlLabel = Label(left_panel, text=\"Youtube URL: \")\n self.urlLabel.grid(row=0, column=0)\n\n self.urlEntry = Entry(left_panel)\n self.urlEntry.grid(row=0, column=1)\n\n \"\"\"TITLE\"\"\"\n self.videoLabel = Label(left_panel, text=\"Video Title: \")\n self.videoLabel.grid(row=1, column=0)\n\n self.videoTitle = Label(left_panel, 
textvariable=self.video_title_text)\n self.videoTitle.grid(row=1, column=1)\n\n \"\"\"TYPE\"\"\"\n self.videoTypeLabel = Label(left_panel, text=\"Video Type: \")\n self.videoTypeLabel.grid(row=2, column=0)\n\n self.videoType = Label(left_panel)\n self.videoType.grid(row=2, column=1)\n\n \"\"\"DOWNLOAD BTN\"\"\"\n self.downloadBtn = Button(left_panel, text=\"Download\", command=self.file_system.youtube_download)\n self.downloadBtn.grid(row=3, column=0)\n\n self.statusLabel = Label(left_panel, textvariable=self.status_label_text)\n self.statusLabel.grid(row=3, column=1)\n\n \"\"\"DOWNLOAD LOCATION BTN\"\"\"\n self.setDownloadsBtn = Button(left_panel, text=\"Set Download Location\", command=self.file_system.set_download_path)\n self.setDownloadsBtn.grid(row=4, column=0)\n\n self.locationLabel = Label(left_panel, textvariable=self.location_label_text)\n self.locationLabel.grid(row=4, column=1)\n\n self.set_ver_num = Label(left_panel, text='0.1.0')\n self.set_ver_num.grid(row=5, column=0)\n\n #Right panel\n right_panel = Frame(master)\n right_panel.grid(row=0, column=1)\n\n self.logList = Listbox(right_panel)\n self.logList.grid(row=0, column=0)\n\n \"\"\"INIT\"\"\"\n self.location_label_text.set(self.file_system.get_download_path())\n\n def getUrl(self):\n return self.urlEntry.get()\n\n\nroot = Tk()\napp = GUI(root)\nroot.mainloop()\n" } ]
4
aiqiulin12/tensorflow_train
https://github.com/aiqiulin12/tensorflow_train
0715025d91a6a81ebe8cf4827da127406a17e46b
6689bf5fcd42006aa6d697152abb4477789ec08a
5ce71fbf4962856fd7679454ee35c63ac24abd5d
refs/heads/main
2023-02-15T17:39:35.617842
2020-12-26T11:38:06
2020-12-26T11:38:06
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7758151888847351, "alphanum_fraction": 0.8383151888847351, "avg_line_length": 51.57143020629883, "blob_id": "ca63c1bf16f076d051d7ea94aaf4b83712d2c64e", "content_id": "b4af6b7144cd5de211ab7e5eb55be3b35fe14beb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1764, "license_type": "no_license", "max_line_length": 132, "num_lines": 14, "path": "/TensorFlow_train.md", "repo_name": "aiqiulin12/tensorflow_train", "src_encoding": "UTF-8", "text": "# tensorflow_train\n用TensorFlow框架训练模型数据\n1.创建Python项目,使用自己的名字全拼命名项目,在项目中创建Python文件文件称为“ idcard.py”\n2.编写一个由一个输入层,两个隐藏层和一个输出层组成的多个全连接神经网络模型,其中两个隐藏层都是全连接层\n3.编写随机训练样本进行50万次训练,并打印每次训练的误差以及平均误差\n4.对如何对训练模型进行进一步优化\n二,程序设计题\n准备钩叉问题的黑白图片训练样本数据,设计该问题的神经网络模型并实现对训练模型的测试。\n1.在第一题Python项目中重新创建Python文件文件称为“ recognition.py”。\n2.创建包含15组训练样本的文件“ checkData.txt”,文件中前25列数字代表样本的图像数据,后三列是样本的分类号,分类号分别为“ 1,0,0”,“ 0,1,0”和“ 0,0,1”,训练样本文件如下1所示:\n3.在“ recognition.py”文件中编写程序,实现多层卷积神经网络模型。其中有三个卷积层,每个卷积核的卷积形态是[2,2,1,1],步长是[1,1,1,1],第一次卷积的padding更改为“ SAME”,第二次卷积的padding更改为“ VALID”\n4.使用“正交熵”损失函数对样本进行训练,指定训练次数是10000轮,学习率是0.0001。\n5.训练完毕后,用三个不同的测试数据来测试最后神经网络的训练成果,三个测试的数据都没有和训练数据重复,并且“钩”,“叉”,“无法识别” 3中情况各占一个\n6.上述三个测试样本进行计算发现,只有第二个样本“叉”的判断比较正确,如图2所示,请调整如何优化卷积神经网络对图片识别的准确率。\n" }, { "alpha_fraction": 0.4904996454715729, "alphanum_fraction": 0.5686136484146118, "avg_line_length": 31.83333396911621, "blob_id": "919c8d4be06853e75af7438a44cc48875a2d990d", "content_id": "3f4e0b323f08bde8d66d220fa17af7ced0f21489", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2866, "license_type": "no_license", "max_line_length": 118, "num_lines": 84, "path": "/recognition.py", "repo_name": "aiqiulin12/tensorflow_train", "src_encoding": "UTF-8", "text": "# @Author : 兰良炜\r\n# @File : recognition.py\r\nimport tensorflow.compat.v1 as tf\r\ntf.disable_v2_behavior()\r\nimport numpy as np\r\nimport pandas as pd\r\nimport sys\r\n\r\nroundCount = 10000\r\nlearnRate = 0.0001\r\n\r\nargt = sys.argv[1:]\r\n\r\nfor v in argt:\r\n if v.startswith(\"-round=\"):\r\n roundCount = int(v[len(\"-round=\"):])\r\n if v.startswith(\"-learnrate=\"):\r\n learnRate = float(v[len(\"-learnrate=\"):])\r\n\r\nfileData = pd.read_csv('E:\\\\aiqiulin\\\\checkData.txt', dtype=np.float32, header=None)\r\n\r\nwholeData = fileData.iloc[:, :].values\r\n\r\nrowCount = wholeData.shape[0]\r\n\r\nprint(\"wholeData=%s\" % wholeData)\r\nprint(\"rowCount=%d\" % rowCount)\r\n#第三问\r\nx = tf.placeholder(shape=[25], dtype=tf.float32)\r\nyTrain = tf.placeholder(shape=[3], dtype=tf.float32)\r\n\r\nfilter1T = tf.Variable(tf.ones([2, 2, 1, 1]), dtype=tf.float32)\r\n\r\nn1 = tf.nn.conv2d(input=tf.reshape(x, [1, 5, 5, 1]), filter=filter1T, strides=[1, 1, 1, 1], padding='SAME')\r\n\r\nfilter2T = tf.Variable(tf.ones([2, 2, 1, 1]), dtype=tf.float32)\r\n\r\nn2 = tf.nn.conv2d(input=tf.reshape(n1, [1, 5, 5, 1]), filter=filter2T, strides=[1, 1, 1, 1], padding='VALID')\r\n\r\nfilter3T = tf.Variable(tf.ones([2, 2, 1, 1]), dtype=tf.float32)\r\n\r\nn3 = tf.nn.conv2d(input=tf.reshape(n2, [1, 4, 4, 1]), filter=filter3T, strides=[1, 1, 1, 1], padding='VALID')\r\n\r\nn3f = tf.reshape(n3, [1, 9])\r\n#第四问\r\nw4 = tf.Variable(tf.random_normal([9, 16]), dtype=tf.float32)\r\nb4 = tf.Variable(0, dtype=tf.float32)\r\n\r\nn4 = tf.nn.tanh(tf.matmul(n3f, w4) + b4)\r\n\r\nw5 = tf.Variable(tf.random_normal([16, 3]), dtype=tf.float32)\r\nb5 = tf.Variable(0, dtype=tf.float32)\r\n\r\nn5 = tf.reshape(tf.matmul(n4, w5) + b5, [-1])\r\n\r\ny = 
tf.nn.softmax(n5)\r\n\r\nloss = -tf.reduce_mean(yTrain * tf.log(tf.clip_by_value(y, 1e-10, 1.0)))\r\noptimizer = tf.train.RMSPropOptimizer(learnRate)\r\n\r\ntrain = optimizer.minimize(loss)\r\n\r\nsess = tf.Session()\r\nsess.run(tf.global_variables_initializer())\r\n\r\nfor i in range(roundCount):\r\n lossSum = 0.0\r\n\r\n for j in range(rowCount):\r\n result = sess.run([train, x, yTrain, y, loss], feed_dict={x: wholeData[j][0:25], yTrain: wholeData[j][25:28]})\r\n\r\n lossT = float(result[len(result) - 1])\r\n\r\n lossSum = lossSum + lossT\r\n\r\n if j == (rowCount - 1):\r\n print(\"i: %d, loss: %10.10f, avgLoss: %10.10f\" % (i, lossT, lossSum / (rowCount + 1)))\r\n#第五问\r\nprint(sess.run([y, loss], feed_dict={x: [1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\r\n yTrain: [1, 0, 0]}))\r\nprint(sess.run([y, loss], feed_dict={x: [1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1],\r\n yTrain: [0, 1, 0]}))\r\nprint(sess.run([y, loss], feed_dict={x: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0],\r\n yTrain: [0, 0, 1]}))\r\n" }, { "alpha_fraction": 0.5574589371681213, "alphanum_fraction": 0.615988552570343, "avg_line_length": 24.94230842590332, "blob_id": "4588521105b9fb72fa6b6c9781b78f7e79e2463d", "content_id": "dc5df50ac26562416c70bf771d9f7892745201be", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1425, "license_type": "no_license", "max_line_length": 108, "num_lines": 52, "path": "/idcard.py", "repo_name": "aiqiulin12/tensorflow_train", "src_encoding": "UTF-8", "text": "# @Author : 艾秋林\r\n# @File : idcard.py\r\n#第一题\r\nimport tensorflow.compat.v1 as tf\r\ntf.disable_v2_behavior()\r\nimport random\r\n\r\nrandom.seed()\r\n#第二问\r\nx = tf.placeholder(tf.float32)\r\nyTrain = tf.placeholder(tf.float32)\r\n\r\nw1 = tf.Variable(tf.random_normal([4, 32], mean=0.5, stddev=0.1), dtype=tf.float32)\r\nb1 = tf.Variable(0, dtype=tf.float32)\r\n\r\nxr = tf.reshape(x, [1, 4])\r\n\r\nn1 = tf.nn.tanh(tf.matmul(xr, w1) + b1)\r\n\r\nw2 = tf.Variable(tf.random_normal([32, 2], mean=0.5, stddev=0.1), dtype=tf.float32)\r\nb2 = tf.Variable(0, dtype=tf.float32)\r\n\r\nn2 = tf.matmul(n1, w2) + b2\r\n\r\ny = tf.nn.softmax(tf.reshape(n2, [2]))\r\n#第三问\r\nloss = tf.reduce_mean(tf.square(y - yTrain))\r\n\r\noptimizer = tf.train.RMSPropOptimizer(0.01)\r\n\r\ntrain = optimizer.minimize(loss)\r\n\r\nsess = tf.Session()\r\n\r\nsess.run(tf.global_variables_initializer())\r\n\r\nlossSum = 0.0\r\n\r\nfor i in range(500000):\r\n\r\n xDataRandom = [int(random.random() * 10), int(random.random() * 10), int(random.random() * 10),\r\n int(random.random() * 10)]\r\n if xDataRandom[2] % 2 == 0:\r\n yTrainDataRandom = [0, 1]\r\n else:\r\n yTrainDataRandom = [1, 0]\r\n\r\n result = sess.run([train, x, yTrain, y, loss], feed_dict={x: xDataRandom, yTrain: yTrainDataRandom})\r\n\r\n lossSum = lossSum + float(result[len(result) - 1])\r\n\r\n print(\"i: %d, loss: %10.10f, avgLoss: %10.10f\" % (i, float(result[len(result) - 1]), lossSum / (i + 1)))\r\n" } ]
3
eric-ch/buildbot-openxt
https://github.com/eric-ch/buildbot-openxt
510a39645134e3f966499c0ee65eb1cc42320123
8dd2c98dd69d575fbb8acc6c5168fb82e2156995
ee38a2b906b3af14d3a2b1bbd5f7824f51e2be0d
refs/heads/master
2023-05-14T18:12:43.154451
2021-06-08T14:50:24
2021-06-08T14:50:24
272,955,897
0
1
null
2020-06-17T11:25:55
2021-01-25T15:38:38
2021-01-26T00:58:02
Python
[ { "alpha_fraction": 0.5257790088653564, "alphanum_fraction": 0.5274787545204163, "avg_line_length": 29.431034088134766, "blob_id": "dbbcb97e85ad35bcbb1b5040e93ea5c55ff30acf", "content_id": "002d339acce18d5e1c9e1b1a0f6f71d3b1ee24a8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1765, "license_type": "no_license", "max_line_length": 83, "num_lines": 58, "path": "/config/stable_zeus.py", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "# Source configuration for OpenXT Zeus stable.\ncodebases_stable_zeus = {\n 'bitbake': {\n 'repository': 'https://git.openembedded.org/bitbake',\n 'branch': '1.44',\n 'revision': '',\n },\n 'meta-openembedded': {\n 'repository': 'https://git.openembedded.org/meta-openembedded',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'openembedded-core': {\n 'repository': 'https://git.openembedded.org/openembedded-core',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-intel': {\n 'repository': 'https://git.yoctoproject.org/git/meta-intel',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-java': {\n 'repository': 'https://git.yoctoproject.org/git/meta-java',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-selinux': {\n 'repository': 'https://git.yoctoproject.org/git/meta-selinux',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-virtualization': {\n 'repository': 'https://git.yoctoproject.org/git/meta-virtualization',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-openxt-haskell-platform': {\n 'repository': 'https://github.com/OpenXT/meta-openxt-haskell-platform.git',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-openxt-ocaml-platform': {\n 'repository': 'https://github.com/OpenXT/meta-openxt-ocaml-platform.git',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'xenclient-oe': {\n 'repository': 'https://github.com/OpenXT/xenclient-oe.git',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'bordel': {\n 'repository': 'https://github.com/OpenXT/bordel.git',\n 'branch': 'master',\n 'revision': '',\n },\n}\n" }, { "alpha_fraction": 0.5186020135879517, "alphanum_fraction": 0.532130777835846, "avg_line_length": 26.71875, "blob_id": "b10745125eba0cd4f419aabd59f8c1077f1b33ae", "content_id": "35aff0c68e02209ca4ca746cc7ae32ca6c85fa4e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 887, "license_type": "no_license", "max_line_length": 68, "num_lines": 32, "path": "/config/windows_tools.py", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "# Source configuration for OpenXT Windows tools 8.2.0.\ncodebases_windows_tools_8_2_0 = {\n 'openxt': {\n 'repository': 'https://github.com/OpenXT/openxt.git',\n 'branch': 'master',\n 'revision': '',\n },\n 'xc-windows': {\n 'repository': 'https://github.com/OpenXT/xc-windows.git',\n 'branch': 'master',\n 'revision': '',\n },\n 'msi-installer': {\n 'repository': 'https://github.com/OpenXT/msi-installer.git',\n 'branch': 'master',\n 'revision': '',\n },\n 'xc-vusb': {\n 'repository': 'https://github.com/OpenXT/xc-vusb.git',\n 'branch': 'master',\n 'revision': '',\n }\n}\n\n# Source configuration for OpenXT Windows tools 9.0.0.\ncodebases_windows_tools_9_0_0 = {\n 'win-pv' : {\n 'repository': 'https://github.com/OpenXT/win-pv',\n 'branch': 'master',\n 'revision': '',\n }\n}\n" }, { "alpha_fraction": 0.5254318714141846, "alphanum_fraction": 0.5283109545707703, "avg_line_length": 29.647058486938477, "blob_id": "778bd974c5ca3f2bb7a472f6148077da71bd72c9", "content_id": 
"34d85f09af93b5fca3b2400ee4cd279275a3dc2c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2084, "license_type": "no_license", "max_line_length": 83, "num_lines": 68, "path": "/config/stable_master.py", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "# Source configuration for OpenXT master stable.\ncodebases_stable_master = {\n 'bitbake': {\n 'repository': 'https://git.openembedded.org/bitbake',\n 'branch': '1.46',\n 'revision': '',\n },\n 'meta-openembedded': {\n 'repository': 'https://git.openembedded.org/meta-openembedded',\n 'branch': 'dunfell',\n 'revision': '',\n },\n 'openembedded-core': {\n 'repository': 'https://git.openembedded.org/openembedded-core',\n 'branch': 'dunfell',\n 'revision': '',\n },\n 'meta-intel': {\n 'repository': 'https://git.yoctoproject.org/git/meta-intel',\n 'branch': 'dunfell',\n 'revision': '',\n },\n 'meta-java': {\n 'repository': 'https://git.yoctoproject.org/git/meta-java',\n 'branch': 'dunfell',\n 'revision': '',\n },\n 'meta-selinux': {\n 'repository': 'https://git.yoctoproject.org/git/meta-selinux',\n 'branch': 'dunfell',\n 'revision': '',\n },\n 'meta-virtualization': {\n 'repository': 'https://git.yoctoproject.org/git/meta-virtualization',\n 'branch': 'dunfell',\n 'revision': '',\n },\n 'meta-qt5': {\n 'repository': 'https://github.com/meta-qt5/meta-qt5.git',\n 'branch': 'dunfell',\n 'revision': '',\n },\n 'meta-openxt-haskell-platform': {\n 'repository': 'https://github.com/OpenXT/meta-openxt-haskell-platform.git',\n 'branch': 'master',\n 'revision': '',\n },\n 'meta-openxt-ocaml-platform': {\n 'repository': 'https://github.com/OpenXT/meta-openxt-ocaml-platform.git',\n 'branch': 'master',\n 'revision': '',\n },\n 'meta-vglass': {\n 'repository': 'https://gitlab.com/vglass/meta-vglass.git',\n 'branch': 'master',\n 'revision': '',\n },\n 'xenclient-oe': {\n 'repository': 'https://github.com/OpenXT/xenclient-oe.git',\n 'branch': 'master',\n 'revision': '',\n },\n 'bordel': {\n 'repository': 'https://github.com/OpenXT/bordel.git',\n 'branch': 'master',\n 'revision': '',\n },\n}\n" }, { "alpha_fraction": 0.7278710007667542, "alphanum_fraction": 0.7479434013366699, "avg_line_length": 31.677419662475586, "blob_id": "847d2952d8a08eb85739007c865499a8bc2aefe4", "content_id": "f68878c6315d04fbd0cc40a658e590d53c4af4d1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3039, "license_type": "no_license", "max_line_length": 114, "num_lines": 93, "path": "/README.md", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "# OpenXT Auto builder\n\n## Configuration\n\nCreate the Buildbot master:\n\n```sh\nbuildbot@master $ buildbot create-master -r <basedir>\nbuildbot@master $ git clone https://github.com/eric-ch/openxt-buildbot.git .\n```\n\nYou will need to register your buildbot-workers by editing `master.cfg`:\n```python\nc['workers'] = [\n worker.Worker(\"worker-name\", \"password\"),\n]\n```\n\nThis needs to match each worker configuration.\n```sh\nbuildbot@worker $ buildbot-worker create-worker --umask=0o22 <basedir> <master-host>:9989 \"worker-name\" \"password\"\n```\n\nAs well, the master needs some configuration, edit the main dictionaries for\nvarious worker groups, e.g:\n```python\nworkers_oe_10['names'] = [ 'debian10-0' ]\nworkers_oe_10['workdir'] = \"/var/builds/openxt\"\nworkers_oe_10['deploydir'] = \"/srv/http/OpenXT/auto\"\n\nworkers_win_10['names'] = 
[\"OpenXT-Win-0\"]\nworkers_win_10['workdir'] = \"c:\\\\builds\"\nworkers_win_10['deploydir'] = \"/srv/http/OpenXT/auto/windows\"\n```\nNote: This uses rsync/scp to copy the built artefacts with `urlhost:urlpath` as\ndestination.\n\n\nCreate at least an admin user for the Buildbot interface:\n```sh\nbuildbot@master $ ls buildbot.tac master.cfg\nbuildbot.tac master.cfg\nbuildbot@master $ htpasswd -c .htpasswd user passwd\n```\n\nStart the master:\n```sh\nroot@master $ systemctl start buildbot@<basedir>.service\n```\n\nDepending on your setup, allow TCP traffic to port 8010 (Buildbot HTTP UI) and\n9989 (Buildbot Worker registration service). The following example should be\nnarrowed down depending on your network structure (filter local IPs and what\nnot).\n\n```sh\nroot@master # iptables -A INPUT -p tcp -m tcp --dport 8010\nroot@master # iptables -A INPUT -p tcp -m tcp --dport 9989\n```\n\n## Before building, on the workers:\n\nDeploy your certificates.\nUsing the default path: `/var/builds/openxt/certs` will be where the `.pem`\nfiles should be found.\nExample with a self signed certificate:\n```sh\nbuildbot@worker $ cd /var/builds/openxt\nbuildbot@worker $ mkdir certs\nbuildbot@worker $ openssl genrsa -out certs/prod-cakey.pem 2048\nbuildbot@worker $ openssl genrsa -out certs/dev-cakey.pem 2048\nbuildbot@worker $ openssl req -new -x509 -key certs/prod-cakey.pem -out certs/prod-cacert.pem -days 1095\nbuildbot@worker $ openssl req -new -x509 -key certs/dev-cakey.pem -out certs/dev-cacert.pem -days 1095\n```\n\n## Sources\n\nThe OpenXT Auto-Builder is split in different files:\n- `config_*.py` with the default repository configurations for various builds.\n- `config.py` with helpers to aggregate these for Buildbot consumption.\n- `schedulers.py` holds the schedulers definition, including forced interfaces.\n- `factories_openxt.py` has the builders factories definitions for the OE\n components.\n- `factories_wintools.py` has the builders factories definitions for Windows\n components.\n\n## Improvements\n\n- Push the certificate to the worker from the build-master.\n- `repo_quick` should be fixed, see inline comments.\n- Replace Buildbot Upload steps with Rsync\n * Easier/Safer to manage `authorized_keys` with the hosting component\n fetching.\n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6028938889503479, "avg_line_length": 32.440860748291016, "blob_id": "c32e0e9412c0d3ad10101effa59503351eff1671", "content_id": "b83622576fd1bd3ce8d11808ee943fbb48a861bd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3110, "license_type": "no_license", "max_line_length": 87, "num_lines": 93, "path": "/schedulers.py", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "# -*- python -*-\n\nfrom buildbot.plugins import (\n schedulers,\n util\n)\nfrom config.utils import codebases_to_params\n\ndef scheduler_force_custom(name, buttonName, builders, template_dfl, codebases_custom):\n return schedulers.ForceScheduler(\n name=name,\n buttonName=buttonName,\n label=\"Manual Custom build\",\n reason=util.StringParameter(\n name=\"reason\", label=\"Reason:\", required=False, size=140\n ),\n builderNames=builders,\n codebases=codebases_to_params(codebases_custom),\n properties=[\n util.StringParameter(\n name=\"template\", label=\"Configuration Template:\",\n default=template_dfl\n )\n ])\n\ndef scheduler_force_stable(name, builders, template_dfl, codebases_stable):\n return schedulers.ForceScheduler(\n name=name,\n 
buttonName=\"Stable build\",\n label=\"Trigger stable build\",\n reason=util.StringParameter(\n name=\"reason\", label=\"Reason:\", required=False, size=140\n ),\n builderNames=builders,\n codebases=codebases_to_params(codebases_stable),\n properties=[\n util.StringParameter(\n name=\"template\", label=\"Configuration Template:\",\n default=template_dfl\n )\n ])\n\ndef scheduler_nightly(name, builders, template_dfl, codebases, hour, minute):\n def filter_change(c):\n return c.codebase in codebases and \\\n c.branch == codebases.get(c.codebase).get('branch') and \\\n not codebases.get(c.codebase).get('revision')\n\n return schedulers.Nightly(\n name=name,\n codebases=codebases,\n properties={\n 'template': template_dfl\n },\n builderNames=builders,\n hour=hour,\n minute=minute,\n change_filter=util.ChangeFilter(filter_fn=filter_change),\n onlyIfChanged=True)\n\ndef scheduler_force_windows_tools(name, buttonName, builders, codebases):\n return schedulers.ForceScheduler(\n name=name,\n buttonName=buttonName,\n label=\"Manual Windows Tools build\",\n reason=util.StringParameter(\n name=\"reason\", label=\"Reason:\", required=False, size=140\n ),\n builderNames=builders,\n codebases=codebases_to_params(codebases),\n properties=[\n util.ChoiceStringParameter(\n name=\"type\", label=\"Build type:\",\n choices=[ 'free', 'checked' ],\n default='checked'\n )\n ])\n\ndef scheduler_nightly_windows_tools(name, builders, codebases, hour, minute):\n def filter_change(c):\n return c.codebase in codebases and \\\n c.branch == codebases.get(c.codebase).get('branch') and \\\n not codebases.get(c.codebase).get('revision')\n\n return schedulers.Nightly(\n name=name,\n codebases=codebases,\n properties={ 'type': 'free' },\n builderNames=builders,\n hour=hour,\n minute=minute,\n change_filter=util.ChangeFilter(filter_fn=filter_change),\n onlyIfChanged=True)\n" }, { "alpha_fraction": 0.6023383736610413, "alphanum_fraction": 0.6067051887512207, "avg_line_length": 42.41896057128906, "blob_id": "e0bc340e62437adf295f46213736ba0a69ddc812", "content_id": "05715405dc6e549dab7678dd121ee712a821d570", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14198, "license_type": "no_license", "max_line_length": 105, "num_lines": 327, "path": "/factories/openxt.py", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "from buildbot.plugins import *\nfrom buildbot.process.results import SUCCESS\n\n# General notes:\n# - Bitbake will print 'Bitbake still alive (5000s)' when busy building things\n# for a long time (webkitgtk/uim/etc), so Timeout after ~5000s\nstep_timeout = 5030\n\n# Base environment:\n# - Requires read access to the certificates to sign the build.\n# - Requires read/write access to the download cache.\n# The autobuilder tree should look like:\n# | certs/\n# | workdir_base/\n# | workdir_base/downloads\n# | workdir_base/<ver>-custom\n# | workdir_base/<ver>-custom/certs -> ../../certs\n# | workdir_base/<ver>-custom/downloads -> ../downloads\n# | workdir_base/<ver>-stable\n# | workdir_base/<ver>-stable/certs -> ../../certs\n# | workdir_base/<ver>-stable/downloads -> ../downloads\n\n# Steps wrappers.\ndef step_init_tree(workdir):\n return steps.ShellSequence(\n workdir=workdir,\n #hideStepIf=lambda results, s: results==SUCCESS,\n name='Initialize environment',\n haltOnFailure=True,\n commands=[\n util.ShellArg(command=['mkdir', '-p', '../downloads'],\n haltOnFailure=True, logfile='stdio'),\n util.ShellArg(command=['ln', '-sfT', '../downloads', 
'downloads'],\n haltOnFailure=True, logfile='stdio'),\n util.ShellArg(command=['ln', '-sfT', '../../certs', 'certs'],\n haltOnFailure=True, logfile='stdio')\n ])\n\ndef step_remove_history(workdir):\n return steps.ShellCommand(\n workdir=workdir,\n name='Remove build history',\n haltOnFailure=True,\n command=[ '/bin/sh', '-c', util.Interpolate(\" \\\n find . -maxdepth 1 ! -path . -name '%(prop:buildername)s-[0-9]*' | \\\n sort -V | \\\n head -n-2 | \\\n xargs rm -rf \\\n \")])\n\ndef step_bordel_config(workdir, template, legacy=False, sstate_uri=\"\"):\n return steps.ShellSequence(\n workdir=workdir,\n haltOnFailure=True,\n name='Configure source tree',\n commands=[\n util.ShellArg(command=[ './openxt/bordel/bordel', '-i', '0', 'config',\n '--default', '--force', '--rmwork', '-t', template ] +\n ([ '--no-repo-branch' ] if not legacy else []) +\n ([ '--sstate-mirror', sstate_uri ] if sstate_uri else []),\n haltOnFailure=True, logfile='stdio')\n ])\n\ndef step_set_build_id(workdir):\n return steps.ShellCommand(\n workdir=workdir,\n #hideStepIf=lambda results, s: results==SUCCESS,\n name='Set build ID',\n haltOnFailure=True,\n command=[ 'sed', '-i',\n '-e', util.Interpolate(\"s:^OPENXT_BUILD_ID\\s*=.*:OPENXT_BUILD_ID=\\\"%(prop:buildnumber)s\\\":\"),\n '-e', util.Interpolate(\"s:^OPENXT_VERSION\\s*=.*:OPENXT_VERSION=\\\"%(prop:buildername)s\\\":\"),\n './build-0/conf/openxt.conf'])\n\ndef step_bordel_layer_add(workdir, layer):\n return steps.ShellCommand(\n workdir=workdir,\n command=[ './openxt/bordel/bordel', 'layer', 'add', layer ],\n haltOnFailure=True,\n name='Add layer {}'.format(layer))\n\ndef step_bordel_build(workdir):\n return steps.ShellCommand(\n workdir=workdir,\n command=[ './openxt/bordel/bordel', '-i', '0', 'build' ],\n haltOnFailure=True, timeout=step_timeout,\n name='Build manifest')\n\ndef step_bordel_deploy(workdir):\n return steps.ShellCommand(\n workdir=workdir,\n command=[ './openxt/bordel/bordel', '-i', '0', 'deploy', 'iso' ],\n haltOnFailure=True,\n name='Assemble installer medium.')\n\n# Upload the installation artefacts to the build-master.\ndef step_upload_installer(srcfmt, destfmt):\n destpath = destfmt + \"/%(prop:buildername)s/%(prop:buildnumber)s\"\n return steps.DirectoryUpload(\n name='Upload installer',\n workersrc=util.Interpolate(srcfmt + \"/build-0/deploy\"),\n masterdest=util.Interpolate(destpath),\n url=None)\n\n# Upload the upgrade artefacts to the build-master.\ndef step_upload_upgrade(srcfmt, destfmt):\n destpath = destfmt + \"/%(prop:buildername)s/%(prop:buildnumber)s\"\n return steps.DirectoryUpload(\n name='Upload repository',\n workersrc=util.Interpolate(srcfmt + \"/build-0/staging/repository\"),\n masterdest=util.Interpolate(destpath + \"/repository\"),\n url=None)\n\n# Clean sstate of recipes that cause problems as mirror\ndef step_clean_problematic(workfmt):\n return [\n steps.ShellSequence(\n workdir=util.Interpolate(workfmt + \"/build-0\"),\n name='Clean problematic sstate (dom0)',\n env={\n 'BB_ENV_EXTRAWHITE': \"MACHINE DISTRO BUILD_UID LAYERS_DIR\",\n 'LAYERS_DIR': util.Interpolate(workfmt + \"/build-0/layers\"),\n 'BUILDDIR': util.Interpolate(workfmt + \"/build-0\"),\n 'PATH': [ util.Interpolate(workfmt + \"/build-0/layers/bitbake/bin\"),\n \"${PATH}\"],\n 'MACHINE': \"xenclient-dom0\"\n },\n haltOnFailure=True,\n commands=[\n util.ShellArg(command=[ 'bitbake', 'ghc-native',\n '-c', 'cleansstate' ],\n haltOnFailure=True, logfile='stdio'),\n util.ShellArg(command=[ 'bitbake', 'ocaml-cross-x86_64',\n '-c', 'cleansstate' ],\n 
haltOnFailure=True, logfile='stdio'),\n util.ShellArg(command=[ 'bitbake', 'findlib-cross-x86_64',\n '-c', 'cleansstate' ],\n haltOnFailure=True, logfile='stdio')\n ]\n ),\n steps.ShellCommand(\n workdir=util.Interpolate(workfmt + \"/build-0\"),\n name='Clean problematic sstate (installer)',\n env={\n 'BB_ENV_EXTRAWHITE': \"MACHINE DISTRO BUILD_UID LAYERS_DIR\",\n 'LAYERS_DIR': util.Interpolate(workfmt + \"/build-0/layers\"),\n 'BUILDDIR': util.Interpolate(workfmt + \"/build-0\"),\n 'PATH': [ util.Interpolate(workfmt + \"/build-0/layers/bitbake/bin\"),\n \"${PATH}\"],\n 'MACHINE': \"openxt-installer\"\n },\n command=[ 'bitbake', 'xenclient-installer-image', '-c', 'cleansstate'],\n haltOnFailure=True\n )\n ]\n\n# Flush and upload the sstate-cache to the build-master.\ndef step_upload_sstate(srcfmt, destfmt):\n destpath = destfmt + \"/%(prop:buildername)s/sstate/\"\n\n return [\n steps.MasterShellCommand(\n name=\"Remove stale shared-state.\",\n hideStepIf=lambda results, s: results==SUCCESS,\n command=[ \"rm\", \"-rf\", util.Interpolate(destpath)]\n ),\n steps.MultipleFileUpload(\n name='Upload shared-state',\n workdir=util.Interpolate(srcfmt + '/sstate-cache'),\n workersrcs=['{:02x}'.format(n) for n in range(256)] + ['debian-10'],\n masterdest=util.Interpolate(destpath),\n url=None\n )\n ]\n\n# Layout of the codebases for the different repositories for bordel.\ncodebase_layout = {\n 'bats-suite': '/openxt/bats-suite',\n 'bitbake': '/layers/bitbake',\n 'bordel': '/openxt/bordel',\n 'disman': '/openxt/disman',\n 'fbtap': '/openxt/fbtap',\n 'gene3fs': '/openxt/gene3fs',\n 'glass': '/openxt/glass',\n 'glassdrm': '/openxt/glassdrm',\n 'icbinn': '/openxt/icbinn',\n 'idl': '/openxt/idl',\n 'input': '/openxt/input',\n 'installer': '/openxt/installer',\n 'ivc': '/openxt/ivc',\n 'libedid': '/openxt/libedid',\n 'libxcdbus': '/openxt/libxcdbus',\n 'libxenbackend': '/openxt/libxenbackend',\n 'linux-xen-argo': '/openxt/linux-xen-argo',\n 'manager': '/openxt/manager',\n 'meta-intel': '/layers/meta-intel',\n 'meta-java': '/layers/meta-java',\n 'meta-openembedded': '/layers/meta-openembedded',\n 'meta-openxt-externalsrc': '/layers/meta-openxt-externalsrc',\n 'meta-openxt-haskell-platform': '/layers/meta-openxt-haskell-platform',\n 'meta-openxt-ocaml-platform': '/layers/meta-openxt-ocaml-platform',\n 'meta-qt5': '/layers/meta-qt5',\n 'meta-selinux': '/layers/meta-selinux',\n 'meta-vglass': '/layers/meta-vglass',\n 'meta-vglass-externalsrc': '/layers/meta-vglass-externalsrc',\n 'meta-virtualization': '/layers/meta-virtualization',\n 'network': '/openxt/network',\n 'openembedded-core': '/layers/openembedded-core',\n 'openxtfb': '/openxt/openxtfb',\n 'pv-display-helper': '/openxt/pv-display-helper',\n 'pv-linux-drivers': '/openxt/pv-linux-drivers',\n 'resized': '/openxt/resized',\n 'surfman': '/openxt/surfman',\n 'sync-client': '/openxt/sync-client',\n 'sync-wui': '/openxt/sync-wui',\n 'toolstack': '/openxt/toolstack',\n 'toolstack-data': '/openxt/toolstack-data',\n 'uid': '/openxt/uid',\n 'vusb-daemon': '/openxt/vusb-daemon',\n 'xblanker': '/openxt/xblanker',\n 'xclibs': '/openxt/xclibs',\n 'xctools': '/openxt/xctools',\n 'xenclient-oe': '/layers/xenclient-oe',\n 'xenfb2': '/openxt/xenfb2',\n 'xf86-video-openxtfb': '/openxt/xf86-video-openxtfb',\n 'xsm-policy': '/openxt/xsm-policy',\n}\n\n# Factory for OpenXT+Bordel builds until stable-9.\n# The bordel scripts used the bare-clone of each sub-project repository created\n# by Repo-tool as a version-control mirror (SRC_URI in layer recipes).\ndef 
factory_bordel_legacy(workdir_base, deploy_base, codebases_oe):\n workdir_fmt = workdir_base + \"/%(prop:buildername)s-%(prop:buildnumber)s\"\n f = util.BuildFactory()\n # Clean up past artefacts (first to make space if need be).\n f.addStep(step_remove_history(workdir_base))\n # Fetch sources.\n for codebase, defaults in codebases_oe.items():\n destdir = codebase_layout.get(codebase, '/unknown/' + codebase)\n f.addStep(steps.Git(\n haltOnFailure=True,\n workdir=util.Interpolate(workdir_fmt + destdir),\n repourl=util.Interpolate('%(src:' + codebase + ':repository)s'),\n branch=util.Interpolate('%(src:' + codebase + ':branch)s'),\n codebase=codebase,\n mode='incremental', clobberOnFailure=True\n ))\n # Bordel relies on repo building bare mirrors in there.\n # This could be changed to point to the actual clones though.\n if destdir.startswith('/openxt'):\n bare_name = defaults['repository'].split('/')[-1]\n base_name = bare_name\n if bare_name.endswith('.git'):\n base_name = bare_name[:-4]\n f.addStep(steps.ShellSequence(\n workdir=util.Interpolate(workdir_fmt + '/.repo/projects/openxt'),\n name='Fake Repo bare repository mirror.',\n hideStepIf=lambda results, s: results==SUCCESS,\n haltOnFailure=True,\n commands=[\n util.ShellArg(\n command=['ln', '-sfT',\n '../../../openxt/' + base_name + '/.git', bare_name ],\n haltOnFailure=True, logfile='stdio'\n ),\n util.ShellArg(\n command=['git', '-C', bare_name, 'branch', '-f', 'build-0' ],\n haltOnFailure=True, logfile='stdio'\n )]\n ))\n # Builder environment setup (handle first builds).\n f.addStep(step_init_tree(util.Interpolate(workdir_fmt)))\n # Build using bordel.\n f.addStep(step_bordel_config(util.Interpolate(workdir_fmt),\n util.Interpolate(\"%(prop:template)s\"), legacy=True))\n f.addStep(step_set_build_id(util.Interpolate(workdir_fmt)))\n f.addStep(step_bordel_build(util.Interpolate(workdir_fmt)))\n f.addStep(step_bordel_deploy(util.Interpolate(workdir_fmt)))\n f.addStep(step_upload_installer(workdir_fmt, deploy_base))\n f.addStep(step_upload_upgrade(workdir_fmt, deploy_base))\n return f\n\n\n# Factory for OpenXT+Bordel starting from branch \"zeus\" (post 9.x).\n# Bordel can now use the layers directly without requiring a local mirror for\n# each OpenXT sub-project. 
A few improvments:\n# - This flavor can export a build shared-state.\n# - Layers named '-externalsrc' present in codebases{}, are layered on top of\n# the given bblayers.conf provided by the template.\n# - If provided, the builder will try to use the given mirror_sstate.\ndef factory_bordel(workdir_base, deploy_base, codebases, deploy_sstate=False,\n mirror_sstate=\"\"):\n workdir_fmt = workdir_base + \"/%(prop:buildername)s-%(prop:buildnumber)s\"\n f = util.BuildFactory()\n # Remove past builds first.\n f.addStep(step_remove_history(workdir_base))\n # Fetch sources.\n for codebase, _ in codebases.items():\n destdir = codebase_layout.get(codebase, '/unknown/' + codebase)\n f.addStep(steps.Git(\n haltOnFailure=True,\n workdir=util.Interpolate(workdir_fmt + destdir),\n repourl=util.Interpolate('%(src:' + codebase + ':repository)s'),\n branch=util.Interpolate('%(src:' + codebase + ':branch)s'),\n codebase=codebase,\n mode='incremental', clobberOnFailure=True\n ))\n # Builder environment setup (handle first builds).\n f.addStep(step_init_tree(util.Interpolate(workdir_fmt)))\n # Configure the build environment.\n f.addStep(step_bordel_config(util.Interpolate(workdir_fmt),\n util.Interpolate(\"%(prop:template)s\"), legacy=False, sstate_uri=mirror_sstate))\n # Add externalsrc layers if any.\n # Note: match '-externalsrc' in codebase name. It lets us re-use the same\n # templates as regular builds.\n for codebase, _ in { k: v for k, v in codebases.items() if '-externalsrc' in k }.items():\n f.addStep(step_bordel_layer_add(util.Interpolate(workdir_fmt), codebase))\n # Set build-id and build.\n f.addStep(step_set_build_id(util.Interpolate(workdir_fmt)))\n f.addStep(step_bordel_build(util.Interpolate(workdir_fmt)))\n f.addStep(step_bordel_deploy(util.Interpolate(workdir_fmt)))\n f.addStep(step_upload_installer(workdir_fmt, deploy_base))\n f.addStep(step_upload_upgrade(workdir_fmt, deploy_base))\n if deploy_sstate:\n f.addSteps(step_clean_problematic(workdir_fmt))\n f.addSteps(step_upload_sstate(workdir_fmt, deploy_base))\n return f\n" }, { "alpha_fraction": 0.4927850067615509, "alphanum_fraction": 0.5036075115203857, "avg_line_length": 28.489360809326172, "blob_id": "28175e95ee395a7922f1326820a68eec6ff40a5c", "content_id": "df256903cec3057892bffb50bb8d865851bdaf3a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5544, "license_type": "no_license", "max_line_length": 83, "num_lines": 188, "path": "/config/custom_9.py", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "# Source configuration for OpenXT 9 custom.\ncodebases_custom_9 = {\n 'bitbake': {\n 'repository': 'https://git.openembedded.org/bitbake',\n 'branch': '1.34',\n 'revision': '',\n },\n 'meta-openembedded': {\n 'repository': 'https://git.openembedded.org/meta-openembedded',\n 'branch': 'pyro',\n 'revision': '',\n },\n 'openembedded-core': {\n 'repository': 'https://git.openembedded.org/openembedded-core',\n 'branch': 'pyro',\n 'revision': '',\n },\n 'meta-intel': {\n 'repository': 'https://git.yoctoproject.org/git/meta-intel',\n 'branch': 'pyro',\n 'revision': '',\n },\n 'meta-java': {\n 'repository': 'https://git.yoctoproject.org/git/meta-java',\n 'branch': 'pyro',\n 'revision': '',\n },\n 'meta-selinux': {\n 'repository': 'https://git.yoctoproject.org/git/meta-selinux',\n 'branch': 'master',\n 'revision': 'b1dac7e2b26f869c991c6492aa7fa18eaa4b47f6',\n },\n 'meta-virtualization': {\n 'repository': 'https://git.yoctoproject.org/git/meta-virtualization',\n 
'branch': 'pyro',\n 'revision': '',\n },\n 'meta-openxt-haskell-platform': {\n 'repository': 'https://github.com/OpenXT/meta-openxt-haskell-platform.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'meta-openxt-ocaml-platform': {\n 'repository': 'https://github.com/OpenXT/meta-openxt-ocaml-platform.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'xenclient-oe': {\n 'repository': 'https://github.com/OpenXT/xenclient-oe.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'bordel': {\n 'repository': 'https://github.com/OpenXT/bordel.git',\n 'branch': 'master',\n 'revision': '',\n },\n 'fbtap': {\n 'repository': 'https://github.com/OpenXT/fbtap.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'gene3fs': {\n 'repository': 'https://github.com/OpenXT/gene3fs.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'icbinn': {\n 'repository': 'https://github.com/OpenXT/icbinn.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'idl': {\n 'repository': 'https://github.com/OpenXT/idl.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'input': {\n 'repository': 'https://github.com/OpenXT/input.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'installer': {\n 'repository': 'https://github.com/OpenXT/installer.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'libedid': {\n 'repository': 'https://github.com/OpenXT/libedid.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'libxcdbus': {\n 'repository': 'https://github.com/OpenXT/libxcdbus.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'libxenbackend': {\n 'repository': 'https://github.com/OpenXT/libxenbackend.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'linux-xen-argo': {\n 'repository': 'https://github.com/OpenXT/linux-xen-argo.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'manager': {\n 'repository': 'https://github.com/OpenXT/manager.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'network': {\n 'repository': 'https://github.com/OpenXT/network.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'pv-linux-drivers': {\n 'repository': 'https://github.com/OpenXT/pv-linux-drivers.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'resized': {\n 'repository': 'https://github.com/OpenXT/resized.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'surfman': {\n 'repository': 'https://github.com/OpenXT/surfman.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'sync-client': {\n 'repository': 'https://github.com/OpenXT/sync-client.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'sync-wui': {\n 'repository': 'https://github.com/OpenXT/sync-wui.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'toolstack-data': {\n 'repository': 'https://github.com/OpenXT/toolstack-data.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'toolstack': {\n 'repository': 'https://github.com/OpenXT/toolstack.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'uid': {\n 'repository': 'https://github.com/OpenXT/uid.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'vusb-daemon': {\n 'repository': 'https://github.com/OpenXT/vusb-daemon.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'xblanker': {\n 'repository': 'https://github.com/OpenXT/xblanker.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'xclibs': {\n 'repository': 'https://github.com/OpenXT/xclibs.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'xctools': {\n 'repository': 'https://github.com/OpenXT/xctools.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'xenfb2': {\n 'repository': 
'https://github.com/OpenXT/xenfb2.git',\n 'branch': 'stable-9',\n 'revision': '',\n },\n 'xsm-policy': {\n 'repository': 'https://github.com/OpenXT/xsm-policy.git',\n 'branch': 'stable-9',\n 'revision': '',\n }\n}\n" }, { "alpha_fraction": 0.6325723528862, "alphanum_fraction": 0.639942467212677, "avg_line_length": 38.176055908203125, "blob_id": "7a99b4410f32ca0e377cc8f01c6a7d83dd0c07ce", "content_id": "be4d82bcdb2327f51e13071312444fe2d8c5e03d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5563, "license_type": "no_license", "max_line_length": 87, "num_lines": 142, "path": "/factories/windows_tools.py", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "import os\n\nfrom buildbot.plugins import *\nfrom buildbot.process.results import SUCCESS\n\n# General notes:\n\nstep_timeout = 1200\n\n# Base environment:\n# The autobuilder tree should look like:\n# | workdir_base/openxt\n# | workdir_base/openxt/windows/\n# | workdir_base/openxt/windows/winbuild-prepare.ps1\n# | workdir_base/openxt/windows/winbuild-all.ps1\n# | workdir_base/openxt/windows/xc-windows\n# | workdir_base/openxt/windows/xc-windows/dobuild.bat\n# | workdir_base/openxt/windows/xc-windows/xc-vusb\n\n# NOTE: This is not subtle at all and tied up in a lot of technical debt.\n# It is quite convoluted how things tie up together, but it is technically\n# possible to build each component then each driver separately.\n# Given the past EOL status of most of xc-windows.git, efforts would better be\n# invested in upgrading xc-vusb.git and using the upstream provided Xen PV\n# drivers for Windows. 9.0 even builds in the EWDK, which makes everything a\n# lot more simple.\n\n# Layout of the codebases for the different repositories to the build scripts.\ncodebase_layout = {\n 'openxt': r'\\openxt',\n 'xc-windows': r'\\openxt\\windows\\xc-windows',\n 'msi-installer': r'\\openxt\\windows\\msi-installer',\n 'xc-vusb': r'\\openxt\\windows\\xc-windows\\xc-vusb',\n}\n\ndef factory_windows_tools_8_2_0(workdir_base, deploydir, codebases):\n f = util.BuildFactory()\n f.addStep(steps.ShellSequence(\n hideStepIf=lambda results, s: results==SUCCESS,\n name='Initialize environment',\n haltOnFailure=True,\n commands=[\n util.ShellArg(command=[\n 'if', 'not', 'exist' , workdir_base, 'mkdir', workdir_base ],\n haltOnFailure=True, logfile='stdio')\n ]))\n for codebase, _ in codebases.items():\n destdir = codebase_layout.get(codebase, '/unknown/' + codebase)\n f.addStep(steps.Git(\n haltOnFailure=True,\n workdir=util.Interpolate(workdir_base + destdir),\n repourl=util.Interpolate('%(src:' + codebase + ':repository)s'),\n branch=util.Interpolate('%(src:' + codebase + ':branch)s'),\n codebase=codebase,\n mode='full', method='fresh', clobberOnFailure=True\n ))\n f.addStep(steps.ShellCommand(\n workdir=workdir_base + r'\\openxt\\windows',\n name='Configure the build environment',\n haltOnFailure=True,\n command=[ 'powershell', r'.\\winbuild-prepare.ps1',\n 'config=sample-config.xml',\n util.Interpolate(\"build=%(prop:buildnumber)s\"),\n 'certname=developer',\n 'branch=master']\n ))\n f.addStep(steps.ShellCommand(\n workdir=workdir_base + r'\\openxt\\windows',\n name='Build all',\n haltOnFailure=True,\n command=['powershell', r'.\\winbuild-all.ps1']\n ))\n f.addStep(steps.FileUpload(\n workersrc=workdir_base + r'\\openxt\\windows\\output\\xc-wintools.iso',\n masterdest=util.Interpolate(\n deploydir + r'/openxt-windows-tools-8.2.0-%(prop:buildnumber)s.iso'),\n url=None\n ))\n return 
f\n\n\n# Following the windows tool port on upstream 9.0.0 drivers, the sources are\n# arranged in a git with submodules.\ndef factory_windows_tools_9_0_0(workdir_base, deploydir, codebases):\n f = util.BuildFactory()\n f.addStep(steps.ShellSequence(\n hideStepIf=lambda results, s: results==SUCCESS,\n name='Initialize environment',\n haltOnFailure=True,\n commands=[\n util.ShellArg(command=[\n 'if', 'not', 'exist' , workdir_base, 'mkdir', workdir_base ],\n haltOnFailure=True, logfile='stdio')\n ]))\n # Fetch sources and external dependencies.\n f.addStep(steps.Git(\n haltOnFailure=True,\n workdir=util.Interpolate(workdir_base + r'\\win-pv'),\n repourl=util.Interpolate('%(src:win-pv:repository)s'),\n branch=util.Interpolate('%(src:win-pv:branch)s'),\n codebase='win-pv',\n mode='full', method='clobber', clobberOnFailure=True,\n submodules=True\n ))\n f.addStep(steps.ShellCommand(\n workdir=util.Interpolate(workdir_base + r'\\win-pv'),\n name='Fetch external dependencies',\n haltOnFailure=True,\n command=r'powershell .\\fetch-externals.ps1'\n ))\n # Build all, assumes the EWDK is on d:\n f.addStep(steps.ShellCommand(\n workdir=util.Interpolate(workdir_base + r'\\win-pv'),\n name='Configure and build',\n haltOnFailure=True,\n command=util.Interpolate(\n r'call d:\\BuildEnv\\SetupBuildEnv.cmd && call buildall.bat %(prop:type)s'\n )))\n # Upload installer depending on build type\n f.addStep(steps.FileUpload(\n name=\"Upload debug installer\",\n hideStepIf=util.Property(\"type\") != 'checked',\n doStepIf=util.Property(\"type\") == 'checked',\n workersrc=util.Interpolate(\n workdir_base + r'\\win-pv\\installer\\bin\\x64\\Debug\\OpenXT-Tools.msi'),\n masterdest=util.Interpolate(\n deploydir + r'/OpenXT-Tools-9.0.0-%(prop:buildnumber)s-%(prop:type)s.msi'),\n mode=0o644,\n url=None\n ))\n f.addStep(steps.FileUpload(\n name=\"Upload release installer\",\n hideStepIf=util.Property(\"type\") != 'free',\n doStepIf=util.Property(\"type\") == 'free',\n workersrc=util.Interpolate(\n workdir_base + r'\\win-pv\\installer\\bin\\x64\\Release\\OpenXT-Tools.msi'),\n masterdest=util.Interpolate(\n deploydir + r'/OpenXT-Tools-9.0.0-%(prop:buildnumber)s-%(prop:type)s.msi'),\n mode=0o644,\n url=None\n ))\n return f\n" }, { "alpha_fraction": 0.48370638489723206, "alphanum_fraction": 0.5747202038764954, "avg_line_length": 33.134830474853516, "blob_id": "d504e3921221d61cf9ae2052761a2dc24e42f166", "content_id": "90c3907821b0a87026d40556ae8413e1230f6fa4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6076, "license_type": "no_license", "max_line_length": 83, "num_lines": 178, "path": "/config/custom_zeus.py", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "# Source configuration for OpenXT Zeus custom.\ncodebases_custom_zeus = {\n 'bitbake': {\n 'repository': 'https://git.openembedded.org/bitbake',\n 'branch': '1.44',\n 'revision': '',\n },\n 'meta-openembedded': {\n 'repository': 'https://git.openembedded.org/meta-openembedded',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'openembedded-core': {\n 'repository': 'https://git.openembedded.org/openembedded-core',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-intel': {\n 'repository': 'https://git.yoctoproject.org/git/meta-intel',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-java': {\n 'repository': 'https://git.yoctoproject.org/git/meta-java',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-selinux': {\n 'repository': 'https://git.yoctoproject.org/git/meta-selinux',\n 
'branch': 'zeus',\n 'revision': '',\n },\n 'meta-virtualization': {\n 'repository': 'https://git.yoctoproject.org/git/meta-virtualization',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-openxt-haskell-platform': {\n 'repository': 'https://github.com/OpenXT/meta-openxt-haskell-platform.git',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-openxt-ocaml-platform': {\n 'repository': 'https://github.com/OpenXT/meta-openxt-ocaml-platform.git',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'xenclient-oe': {\n 'repository': 'https://github.com/OpenXT/xenclient-oe.git',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'meta-openxt-externalsrc': {\n 'repository': 'https://github.com/eric-ch/meta-openxt-externalsrc.git',\n 'branch': 'zeus',\n 'revision': '',\n },\n 'bats-suite': {\n 'repository': 'https://github.com/OpenXT/bats-suite.git',\n 'branch': 'master',\n 'revision': '4615057adc2670bc558d9322fa7941363eda3b9c',\n },\n 'bordel': {\n 'repository': 'https://github.com/OpenXT/bordel.git',\n 'branch': 'master',\n 'revision': '',\n },\n 'fbtap': {\n 'repository': 'https://github.com/OpenXT/fbtap.git',\n 'branch': 'master',\n 'revision': '30fd6ec306b188030a2fa58cde29c3e7f129c908',\n },\n 'icbinn': {\n 'repository': 'https://github.com/OpenXT/icbinn.git',\n 'branch': 'master',\n 'revision': 'dacb22e8499c99774375d0b7bad0f2880c734204',\n },\n 'idl': {\n 'repository': 'https://github.com/OpenXT/idl.git',\n 'branch': 'master',\n 'revision': '7ea58aa0afd1d3059a6c9ebe4cd304bad669204c',\n },\n 'input': {\n 'repository': 'https://github.com/OpenXT/input.git',\n 'branch': 'master',\n 'revision': '799c4861b59b100a5fbd68d406821125de714d9d',\n },\n 'installer': {\n 'repository': 'https://github.com/OpenXT/installer.git',\n 'branch': 'master',\n 'revision': '8831fe9ab2f57c72b8c99ff9ebbddf0e2254c6e5',\n },\n 'libedid': {\n 'repository': 'https://github.com/OpenXT/libedid.git',\n 'branch': 'master',\n 'revision': 'bad8ca87e4649e9aa931c607e6a17a7881c2cfd6',\n },\n 'libxcdbus': {\n 'repository': 'https://github.com/OpenXT/libxcdbus.git',\n 'branch': 'master',\n 'revision': '53650007eeb04abcccc72ae862632203cbbd6214',\n },\n 'libxenbackend': {\n 'repository': 'https://github.com/OpenXT/libxenbackend.git',\n 'branch': 'master',\n 'revision': '66c02bd96475a63f7ae3ed3f1300c8f9dcd5031e',\n },\n 'linux-xen-argo': {\n 'repository': 'https://github.com/OpenXT/linux-xen-argo.git',\n 'branch': 'master',\n 'revision': 'd84cf67969c5510e7e8f18dba2da62c68b565a2e',\n },\n 'manager': {\n 'repository': 'https://github.com/OpenXT/manager.git',\n 'branch': 'master',\n 'revision': 'b28ccc4d241728f6b840779328820cd1387bd14e',\n },\n 'network': {\n 'repository': 'https://github.com/OpenXT/network.git',\n 'branch': 'master',\n 'revision': 'dbf350a0c0f18c8f9dd41c8cda9473fb700c2aa9',\n },\n 'surfman': {\n 'repository': 'https://github.com/OpenXT/surfman.git',\n 'branch': 'master',\n 'revision': '46bfde5850d92399d36edb24313f2e404d53d4dd',\n },\n 'sync-client': {\n 'repository': 'https://github.com/OpenXT/sync-client.git',\n 'branch': 'master',\n 'revision': 'd501139ec156b8f3de5f7150f5d3559de0362fa9',\n },\n 'sync-wui': {\n 'repository': 'https://github.com/OpenXT/sync-wui.git',\n 'branch': 'master',\n 'revision': 'e37a0993f7821365e7dae1e969a837125df418de',\n },\n 'toolstack-data': {\n 'repository': 'https://github.com/OpenXT/toolstack-data.git',\n 'branch': 'master',\n 'revision': 'cb6dd61aee762507dbd432f43b012068353ed0d0',\n },\n 'toolstack': {\n 'repository': 'https://github.com/OpenXT/toolstack.git',\n 'branch': 'master',\n 'revision': 
'8e1f91f340a38203c9d0d95e672e3a94c2137aaf',\n },\n 'uid': {\n 'repository': 'https://github.com/OpenXT/uid.git',\n 'branch': 'master',\n 'revision': '30dc9152274b92ca4a73813b43477deac3c24442',\n },\n 'vusb-daemon': {\n 'repository': 'https://github.com/OpenXT/vusb-daemon.git',\n 'branch': 'master',\n 'revision': '9877a53cf64d1f7a03b2637b59fff281ce721e4c',\n },\n 'xclibs': {\n 'repository': 'https://github.com/OpenXT/xclibs.git',\n 'branch': 'master',\n 'revision': 'ef260585087aa24fff09d5d8d75c7e9cd70c0f30',\n },\n 'xctools': {\n 'repository': 'https://github.com/OpenXT/xctools.git',\n 'branch': 'master',\n 'revision': '8abe7f9aa43cd7a95b958e28e80fbfa2bea1c682',\n },\n 'xenfb2': {\n 'repository': 'https://github.com/OpenXT/xenfb2.git',\n 'branch': 'master',\n 'revision': '33dd2ddd9e083bae500dc6710ffb4f915a8a8ef8',\n },\n 'xsm-policy': {\n 'repository': 'https://github.com/OpenXT/xsm-policy.git',\n 'branch': 'master',\n 'revision': '872126eb809f8be90bd5eb9850d6fe673ce3025c',\n }\n}\n" }, { "alpha_fraction": 0.6733333468437195, "alphanum_fraction": 0.6841025352478027, "avg_line_length": 30.967212677001953, "blob_id": "5bcd3245bb3f551b8296c91659897d31ed420ff5", "content_id": "562bd1904997c252b1c00839a66256aed52b841f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1950, "license_type": "no_license", "max_line_length": 99, "num_lines": 61, "path": "/config/utils.py", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "from buildbot.plugins import changes, util\n\n# That's annoying....\ndef codebases_to_params(codebases):\n codebases_params = []\n for name, defaults in codebases.items():\n codebases_params.append(\n util.CodebaseParameter(\n codebase=name,\n repository=util.StringParameter(name='repository', default=defaults['repository']),\n branch=util.StringParameter(name='branch', default=defaults['branch']),\n revision=util.StringParameter(name='revision', default=defaults['revision']),\n project=None\n ))\n return codebases_params\n\n#\n# codebaseGenerator and dictionary generation from codebases configurations.\n#\nfrom config.custom_9 import codebases_custom_9\nfrom config.custom_zeus import codebases_custom_zeus\nfrom config.stable_zeus import codebases_stable_zeus\nfrom config.custom_master import (\n codebases_custom_master,\n)\nfrom config.stable_master import codebases_stable_master\n\nfrom config.windows_tools import (\n codebases_windows_tools_8_2_0,\n codebases_windows_tools_9_0_0,\n)\n\nall_repositories = {}\nrepo_branches = {}\nfor cb in [\n codebases_custom_9,\n codebases_custom_zeus,\n codebases_stable_zeus,\n codebases_custom_master,\n codebases_stable_master,\n codebases_windows_tools_8_2_0,\n codebases_windows_tools_9_0_0 ]:\n for name, defaults in cb.items():\n all_repositories[defaults['repository']] = name\n repo_branches.setdefault(defaults['repository'], []).append(defaults['branch'])\n\ndef codebaseGenerator(chdict):\n return all_repositories[chdict['repository']]\n\n#\n# Change tracking.\n# Poll upstream codebases for changes every 4h.\n#\npollinterval = 4 * 60 * 60\nchange_source = []\nfor repourl, branches in repo_branches.items():\n change_source.append(changes.GitPoller(\n repourl=repourl,\n branches=branches,\n pollinterval=pollinterval\n ))\n" }, { "alpha_fraction": 0.6388611197471619, "alphanum_fraction": 0.6524308919906616, "avg_line_length": 31.64130401611328, "blob_id": "b08d8465c7e4e497ee8f8ac5dea4f138708e4f51", "content_id": "f083ce0ad3dbb6bdfb3323e0581cad919e433036", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12012, "license_type": "no_license", "max_line_length": 136, "num_lines": 368, "path": "/master.cfg", "repo_name": "eric-ch/buildbot-openxt", "src_encoding": "UTF-8", "text": "# -*- python -*-\n# ex: set filetype=python:\n\nfrom buildbot.plugins import *\nimport importlib\n\nimport config.custom_9\nimport config.custom_zeus\nimport config.stable_zeus\nimport config.custom_master\nimport config.stable_master\nimport config.windows_tools\nimport config.utils\nimport schedulers\nimport factories.openxt\nimport factories.windows_tools\n\nimportlib.reload(config.custom_9)\nimportlib.reload(config.custom_zeus)\nimportlib.reload(config.stable_zeus)\nimportlib.reload(config.custom_master)\nimportlib.reload(config.stable_master)\nimportlib.reload(config.windows_tools)\nimportlib.reload(config.utils)\nimportlib.reload(schedulers)\nimportlib.reload(factories.openxt)\nimportlib.reload(factories.windows_tools)\n\n# This file must be installed as 'master.cfg' in your buildmaster's base\n# directory.\n\n# This is the dictionary that the buildmaster pays attention to. We also use a\n# shorter alias to save typing.\nc = BuildmasterConfig = {}\n\n####### WORKERS\n\n# The 'workers' list defines the set of recognized workers. Each element is a\n# Worker object, specifying a unique worker name and password.\n# The same worker name and password must be configured on the worker.\nc['workers'] = [\n worker.Worker(\"debian8-0\", \"password\"),\n worker.Worker(\"debian10-0\", \"password\"),\n worker.Worker(\"OpenXT-Win-0\", \"password\"),\n]\n\nworkers_oe_9 = {}\nworkers_oe_9['names'] = [ \"debian8-0\" ]\nworkers_oe_9['workdir'] = \"/var/builds/openxt/auto/9\"\nworkers_oe_9['deploydir'] = \"/srv/http/OpenXT/auto/9\"\n\nworkers_oe_zeus = {}\nworkers_oe_zeus['names'] = [ \"debian10-0\" ]\nworkers_oe_zeus['workdir'] = \"/var/builds/openxt/auto/zeus\"\nworkers_oe_zeus['deploydir'] = \"/srv/http/OpenXT/auto/zeus\"\nworkers_oe_zeus['sstateuri'] = \"https://openxt.ainfosec.com/OpenXT/auto/zeus/zeus-stable-sstate/sstate/PATH;downloadfilename=PATH\"\n\nworkers_oe_master = {}\nworkers_oe_master['names'] = [ \"debian10-0\" ]\nworkers_oe_master['workdir'] = \"/var/builds/openxt/auto/master\"\nworkers_oe_master['deploydir'] = \"/srv/http/OpenXT/auto/master\"\nworkers_oe_master['sstateuri'] = \"https://openxt.ainfosec.com/OpenXT/auto/master/master-stable-sstate/sstate/PATH;downloadfilename=PATH\"\n\nworkers_win = {}\nworkers_win['names'] = [ \"OpenXT-Win-0\" ]\nworkers_win['workdir'] = r\"c:\\builds\"\nworkers_win['deploydir'] = \"/srv/http/OpenXT/auto/windows\"\n\n# Locking: Limit the number of builds per worker.\nlock_workers = util.WorkerLock(\n \"lock_workers\",\n maxCount=1\n)\n\n# 'protocols' contains information about protocols which master will use for\n# communicating with workers. 
You must define at least 'port' option that\n# workers could connect to your master with this protocol.\n# 'port' must match the value configured into the workers (with their --master\n# option)\nc['protocols'] = { 'pb': { 'port': 9989 } }\n\n####### CHANGESOURCES\n\n# the 'change_source' setting tells the buildmaster how it should find out\n# about source code changes.\n\nfrom config.utils import (\n codebaseGenerator,\n change_source\n)\n\nc['change_source'] = change_source\nc['codebaseGenerator'] = codebaseGenerator\n\n####### SCHEDULERS\n\n# Configure the Schedulers, which decide how to react to incoming changes.\n\nfrom schedulers import (\n scheduler_force_custom,\n scheduler_force_windows_tools,\n scheduler_force_stable,\n scheduler_nightly,\n scheduler_nightly_windows_tools\n)\n\nfrom config.custom_9 import codebases_custom_9\nfrom config.custom_zeus import codebases_custom_zeus\nfrom config.stable_zeus import codebases_stable_zeus\nfrom config.custom_master import (\n codebases_custom_master,\n)\nfrom config.stable_master import codebases_stable_master\n\nfrom config.windows_tools import (\n codebases_windows_tools_8_2_0,\n codebases_windows_tools_9_0_0,\n)\n\nc['schedulers'] = [\n # OpenXT 9\n scheduler_force_custom(\n \"custom-9\", \"Custom 9 Legacy\",\n [ \"9-custom\" ],\n \"stable-9\", codebases_custom_9\n ),\n scheduler_nightly(\n \"nightly-9-tip\", [ \"9-custom\" ],\n \"stable-9\", codebases_custom_9,\n 22, 00\n ),\n\n # OpenXT Zeus\n scheduler_force_custom(\n \"custom-zeus\", \"Custom Zeus\",\n [ \"zeus-custom\" ],\n \"zeus\", codebases_custom_zeus\n ),\n scheduler_force_stable(\n \"stable-zeus\", [ \"zeus-stable\", \"zeus-stable-sstate\" ],\n \"zeus\", codebases_stable_zeus\n ),\n scheduler_nightly(\n \"nightly-stable-zeus\", [ \"zeus-stable-sstate\" ],\n \"zeus\", codebases_stable_zeus,\n 1, 0\n ),\n\n # OpenXT Master\n scheduler_force_custom(\n \"custom-master\", \"Custom Master\",\n [ \"master-custom\" ],\n \"master\", codebases_custom_master\n ),\n scheduler_force_stable(\n \"stable-master\", [ \"master-stable\", \"master-stable-sstate\" ],\n \"master\", codebases_stable_master\n ),\n # Master nightlies need to trigger on layers and sub-projects changes as\n # the layers track repositories tips. Hence pass the scheduler the custom\n # codebase dict.\n scheduler_nightly(\n \"nightly-stable-master\", [ \"master-stable-sstate\" ],\n \"master\", codebases_custom_master,\n 3, 0\n ),\n\n # Wintools\n scheduler_force_windows_tools(\n \"windows-tools-8-2-0\",\n \"Manual Windows Tools 8.2.0 build\",\n [ \"windows-tools-8-2-0\" ],\n codebases_windows_tools_8_2_0\n ),\n scheduler_nightly_windows_tools(\n \"nightly-windows-tools-8-2-0\", [ \"windows-tools-8-2-0\" ],\n codebases_windows_tools_8_2_0,\n 5, 00\n ),\n scheduler_force_windows_tools(\n \"windows-tools-9-0-0\",\n \"Manual Windows Tools 9.0.0 build\",\n [ \"windows-tools-9-0-0\" ],\n codebases_windows_tools_9_0_0\n ),\n scheduler_nightly_windows_tools(\n \"nightly-windows-tools-9-0-0\", [ \"windows-tools-9-0-0\" ],\n codebases_windows_tools_9_0_0,\n 5, 00\n )\n]\n\n####### BUILDERS\n\n# The 'builders' list defines the Builders, which tell Buildbot how to perform\n# a build: what steps, and which workers can execute them. 
Note that any\n# particular build will only take place on one worker.\nfrom factories.openxt import (\n factory_bordel_legacy,\n factory_bordel,\n)\nfrom factories.windows_tools import (\n factory_windows_tools_8_2_0,\n factory_windows_tools_9_0_0,\n)\n\nc['builders'] = [\n util.BuilderConfig(\n name=\"9-custom\",\n description=\"OpenXT 9: Customized build.\",\n workernames=workers_oe_9['names'],\n factory=factory_bordel_legacy(\n workers_oe_9['workdir'], workers_oe_9['deploydir'],\n codebases_custom_9),\n locks=[ lock_workers.access('counting') ]\n ),\n\n# OpenXT/Zeus\n util.BuilderConfig(\n name=\"zeus-custom\",\n description=\"OpenXT/Zeus: Customized clean build.\",\n workernames=workers_oe_zeus['names'],\n factory=factory_bordel(\n workers_oe_zeus['workdir'], workers_oe_zeus['deploydir'],\n codebases_custom_zeus,\n mirror_sstate=workers_oe_zeus['sstateuri']),\n locks=[ lock_workers.access('counting') ]\n ),\n util.BuilderConfig(\n name=\"zeus-stable\",\n description=\"OpenXT/Zeus: Stable build.\",\n workernames=workers_oe_zeus['names'],\n factory=factory_bordel(\n workers_oe_zeus['workdir'], workers_oe_zeus['deploydir'],\n codebases_stable_zeus,\n mirror_sstate=workers_oe_zeus['sstateuri']),\n locks=[ lock_workers.access('counting') ]\n ),\n util.BuilderConfig(\n name=\"zeus-stable-sstate\",\n description=\"OpenXT/Zeus: Stable build (export sstate).\",\n workernames=workers_oe_zeus['names'],\n factory=factory_bordel(\n workers_oe_zeus['workdir'], workers_oe_zeus['deploydir'],\n codebases_stable_zeus,\n deploy_sstate=True),\n locks=[ lock_workers.access('counting') ]\n ),\n\n# OpenXT/Master\n util.BuilderConfig(\n name=\"master-custom\",\n description=\"OpenXT/Master: Customized build.\",\n workernames=workers_oe_master['names'],\n factory=factory_bordel(\n workers_oe_master['workdir'], workers_oe_master['deploydir'],\n codebases_custom_master,\n mirror_sstate=workers_oe_master['sstateuri']),\n locks=[ lock_workers.access('counting') ]\n ),\n util.BuilderConfig(\n name=\"master-stable\",\n description=\"OpenXT/Master: Stable build.\",\n workernames=workers_oe_master['names'],\n factory=factory_bordel(\n workers_oe_master['workdir'], workers_oe_master['deploydir'],\n codebases_stable_master,\n mirror_sstate=workers_oe_master['sstateuri']),\n locks=[ lock_workers.access('counting') ]\n ),\n util.BuilderConfig(\n name=\"master-stable-sstate\",\n description=\"OpenXT/Master: Stable build (export sstate).\",\n workernames=workers_oe_master['names'],\n factory=factory_bordel(\n workers_oe_master['workdir'], workers_oe_master['deploydir'],\n codebases_stable_master,\n deploy_sstate=True),\n locks=[ lock_workers.access('counting') ]\n ),\n\n# OpenXT Windows PV tools.\n util.BuilderConfig(\n name=\"windows-tools-8-2-0\",\n description=\"OpenXT: Windows PV tools build based on Xen upstream 8.2.0.\",\n workernames=workers_win['names'],\n factory=factory_windows_tools_8_2_0(\n workers_win['workdir'] + r'\\8.2.0',\n workers_win['deploydir'] + '/8.2.0',\n codebases_windows_tools_8_2_0),\n locks=[ lock_workers.access('counting') ]\n ),\n util.BuilderConfig(\n name=\"windows-tools-9-0-0\",\n description=\"OpenXT: Windows PV tools build based on Xen upstream 9.0.0.\",\n workernames=workers_win['names'],\n factory=factory_windows_tools_9_0_0(\n workers_win['workdir'] + r'\\9.0.0',\n workers_win['deploydir'] + '/9.0.0',\n codebases_windows_tools_9_0_0),\n locks=[ lock_workers.access('counting') ]\n )\n]\n\n####### BUILDBOT SERVICES\n\n# 'services' is a list of BuildbotService items like reporter targets. 
The\n# status of each build will be pushed to these targets. buildbot/reporters/*.py\n# has a variety to choose from, like IRC bots.\n\nc['services'] = []\n\n####### PROJECT IDENTITY\n\n# the 'title' string will appear at the top of this buildbot installation's\n# home pages (linked to the 'titleURL').\n\nc['title'] = \"OpenXT Buildbot\"\nc['titleURL'] = \"https://github.com/OpenXT\"\n\n# the 'buildbotURL' string should point to the location where the buildbot's\n# internal web server is visible. This typically uses the port number set in\n# the 'www' entry below, but with an externally-visible host name which the\n# buildbot cannot figure out without some help.\n\nc['buildbotURL'] = \"http://localhost:8010/\"\n\n# TODO: Use util.HTPasswdAuth once fixed.\n# https://github.com/buildbot/buildbot/issues/3126\nimport subprocess\nclass HTPasswdAuth(util.CustomAuth):\n def check_credentials(self, user, passwd):\n return subprocess.run([ \"htpasswd\", \"-vb\", \".htpasswd\", user, passwd ]).returncode == 0\n\n# minimalistic Config to activate new web UI\nc['www'] = {\n 'port': 8010,\n 'plugins': {\n 'waterfall_view': {},\n 'console_view': {},\n 'grid_view': {}\n },\n# 'auth': util.HTPasswdAuth('.htpasswd'),\n 'auth': HTPasswdAuth(),\n 'authz': util.Authz(\n allowRules=[ util.AnyControlEndpointMatcher(role='admin') ],\n roleMatchers=[\n util.RolesFromUsername(\n roles=[ 'admin' ],\n usernames=[ 'admin' ])\n ])\n}\n\n####### DB URL\n\nc['db'] = {\n # This specifies what database buildbot uses to store its state.\n # It's easy to start with sqlite, but it's recommended to switch to a\n # dedicated database, such as PostgreSQL or MySQL, for use in production\n # environments.\n # http://docs.buildbot.net/current/manual/configuration/global.html#database-specification\n 'db_url' : \"sqlite:///state.sqlite\",\n}\n\n####### Misc\n\n# Don't send usage analysis info to buildbot.net.\nc['buildbotNetUsageData'] = None\n" } ]
11
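Note for readers skimming the eric-ch/buildbot-openxt record above: the codebase dictionaries it ships (repository/branch/revision triples such as the 'xenclient-oe' entry) are flattened by config/utils.py into lookup tables that back the codebaseGenerator and the Git change pollers. The following is a minimal, dependency-free sketch of that flattening step only, not the project's actual entry point; the helper name build_lookup_tables is mine, while the sample entry is copied verbatim from the xenclient-oe codebase listed in the record.

# Sketch of the codebase-flattening step mirrored from config/utils.py (assumption:
# helper name is mine; data shape matches the dictionaries serialized above).
def build_lookup_tables(*codebase_dicts):
    all_repositories = {}   # repository URL -> codebase name
    repo_branches = {}      # repository URL -> list of branches to poll
    for cb in codebase_dicts:
        for name, defaults in cb.items():
            all_repositories[defaults['repository']] = name
            repo_branches.setdefault(defaults['repository'], []).append(defaults['branch'])
    return all_repositories, repo_branches

if __name__ == "__main__":
    # Entry taken from the record above (OpenXT xenclient-oe on the zeus branch).
    codebases = {
        'xenclient-oe': {
            'repository': 'https://github.com/OpenXT/xenclient-oe.git',
            'branch': 'zeus',
            'revision': '',
        },
    }
    repos, branches = build_lookup_tables(codebases)
    print(repos)     # {'https://github.com/OpenXT/xenclient-oe.git': 'xenclient-oe'}
    print(branches)  # {'https://github.com/OpenXT/xenclient-oe.git': ['zeus']}

In the repository itself, the first table backs codebaseGenerator (it returns all_repositories[chdict['repository']]) and the second feeds one changes.GitPoller per repository URL, polled every four hours.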
Global19-atlassian-net/multiuserblazeserver
https://github.com/Global19-atlassian-net/multiuserblazeserver
e0089109a4e9a648726fe18a8518a2cc58dccb5c
0926e5f547c46361e9dfb5e8236461a1ce86479b
57a68adbcd4196cc915aafed90e01ba5e49de541
refs/heads/master
2021-05-30T00:08:26.282483
2015-03-08T19:53:07
2015-03-08T19:53:07
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6786389350891113, "alphanum_fraction": 0.6858223080635071, "avg_line_length": 34.74324417114258, "blob_id": "dc51bce2ffc58faf322e377c1ad499830edcd4d1", "content_id": "e82430ad07ffd746307c12e8f84988b3c74d4d8f", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2645, "license_type": "permissive", "max_line_length": 79, "num_lines": 74, "path": "/mbs/tests/test_settings.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "import cStringIO\nimport tempfile\nfrom os.path import join\n\nfrom nose.tools import with_setup\nfrom blaze.utils import example\n\nfrom mbs.settings import settings\nfrom . import config_file, data_file\n\ndef setup_function():\n settings.reset()\n settings.data_file = tempfile.mkdtemp()\n datadir = tempfile.mkdtemp()\n settings.data_directory = datadir\n\ndef teardown_function():\n pass\n\n\n@with_setup(setup_function, teardown_function)\ndef test_from_pyfile_no_data_directory():\n old_data_dir = settings.data_directory\n config = config_file(\"config_no_data_directory.py\")\n assert 'data' not in settings.data\n settings.from_pyfile(config)\n assert 'data' in settings.data\n assert settings.data_directory == old_data_dir\n\n@with_setup(setup_function, teardown_function)\ndef test_from_pyfile_with_data_directory():\n old_data_dir = settings.data_directory\n config = config_file(\"config_with_data_directory.py\")\n assert 'data' not in settings.data\n settings.from_pyfile(config)\n assert 'data' in settings.data\n assert settings.data_directory == '/tmp'\n\n@with_setup(setup_function, teardown_function)\ndef test_dataset_storage():\n storage = settings.storage\n assert len(storage) == 0\n\n@with_setup(setup_function, teardown_function)\ndef test_all_datasets():\n config = config_file(\"config.py\")\n settings.from_pyfile(config)\n settings.data_directory = data_file('datadir')\n all_sets = settings.datamanager.all_datasets()\n assert set(all_sets.keys()) == {'accounts', 'cities', 'events'}\n\n #now we introduce an error by adding a csv that has tab separators\n settings.datamanager.configure('defaultuser/test.csv', delimiter=\"|\")\n path = join('defaultuser', 'test.hdf5')\n uri1 = 'hdfstore://%s' % path\n settings.datamanager.configure(uri1)\n uri2 = 'hdfstore://%s::temp' % path\n settings.datamanager.configure(uri2)\n all_sets = settings.datamanager.all_datasets()\n assert set(all_sets.keys()) == {'accounts', 'cities', 'events',\n join('defaultuser', 'test.csv'),\n uri1, uri2\n }\n assert all_sets[uri1]['temp'].shape == (5,1)\n #why is this one a list?!\n assert all_sets[uri2].shape == [5,1]\n\n@with_setup(setup_function, teardown_function)\ndef test_ls():\n settings.datamanager.write('firstuser', 'foo.hdf5', cStringIO.StringIO())\n settings.datamanager.write('firstuser', 'test.csv', cStringIO.StringIO())\n settings.datamanager.write('seconduser', 'foo2.hdf5', cStringIO.StringIO())\n assert len(settings.datamanager.ls('firstuser')) == 2\n assert len(settings.datamanager.ls()) == 3\n" }, { "alpha_fraction": 0.5867884755134583, "alphanum_fraction": 0.5913562774658203, "avg_line_length": 27.747474670410156, "blob_id": "4bea4729fc2a06d98632ae3ef1068c0238404cf6", "content_id": "eb9ad1e2bf4ef14cdffe2cea06d7c72e576a2623", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2846, "license_type": "permissive", "max_line_length": 79, "num_lines": 99, "path": 
"/mbs/tests/test_views.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "import tempfile\nimport shutil\nfrom os.path import exists, join\nimport json\n\nfrom nose.tools import with_setup\n\nfrom mbs.app import setup_app\nfrom mbs.settings import settings\nfrom . import config_file, data_file\ntest = None\ndata = None\nt = None\ndatadir = None\ndef setup_function():\n global app\n global test\n global datadir\n global t\n datadir = tempfile.mkdtemp()\n config = config_file(\"config.py\")\n app = setup_app(config_file=config)\n test = app.test_client()\n data = settings.data\n settings.data_directory = datadir\n\ndef teardown_function():\n global app\n global test\n global data\n global datadir\n\n if exists(datadir):\n shutil.rmtree(datadir)\n\n app = None\n test = None\n data = None\n datadir = None\n\nold = None\n\ndef setup_auth_test():\n global old\n setup_function()\n old = settings.auth_backend.can_write\n def reject(path, username):\n return False\n settings.auth_backend.can_write = reject\n\ndef teardown_auth_test():\n teardown_function()\n global old\n settings.auth_backend.can_write = old\n\n@with_setup(setup_function, teardown_function)\ndef test_upload():\n with open(data_file('test.csv')) as f:\n resp = test.post(\"/upload\",\n data={'file' : (f, 'test.csv')}\n )\n assert resp.status_code == 200\n result = json.loads(resp.data.decode('utf-8'))\n assert result['path'] == \"defaultuser/test.csv\"\n assert exists(join(settings.data_directory, result['path']))\n\n@with_setup(setup_auth_test, teardown_auth_test)\ndef test_upload_without_permissions():\n with open(data_file('test.csv')) as f:\n resp = test.post(\"/upload\",\n data={'file' : (f, 'test.csv')}\n )\n assert resp.status_code == 403\n assert not exists(join(settings.data_directory, \"defaultuser\", \"test.csv\"))\n\n@with_setup(setup_function, teardown_function)\ndef test_configure():\n resp = test.post(\"/configure\",\n data=json.dumps(\n {'kwargs' : {'delimiter' : '\\t'},\n 'uri' : \"defaultuser/test.csv\"\n }),\n headers={'content-type' : 'application/json'}\n )\n assert resp.status_code == 200\n result = resp.data == 'success'\n assert settings.storage['defaultuser/test.csv'] == {u'delimiter': u'\\t'}\n\n@with_setup(setup_auth_test, teardown_auth_test)\ndef test_configure_without_permissions():\n #monkey patch auth backend to disallow upload\n resp = test.post(\"/configure\",\n data=json.dumps(\n {'kwargs' : {'delimiter' : '\\t'},\n 'uri' : \"defaultuser/test.csv\"\n }),\n headers={'content-type' : 'application/json'}\n )\n assert resp.status_code == 403\n" }, { "alpha_fraction": 0.6226838827133179, "alphanum_fraction": 0.6260527968406677, "avg_line_length": 29.44444465637207, "blob_id": "98e9cf0680ad04ecefa7394a947bbf8b9396c5b7", "content_id": "f4f3d78f3d849016e6833f4e56d8969ed12cec22", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3562, "license_type": "permissive", "max_line_length": 72, "num_lines": 117, "path": "/mbs/tests/test_server.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "from __future__ import absolute_import, division, print_function\nfrom os.path import dirname, join as pjoin\nfrom datetime import datetime\n\nfrom nose.tools import with_setup\nimport datashape\nimport numpy as np\nfrom flask import json\nfrom datetime import datetime\nfrom pandas import DataFrame\nfrom toolz import pipe\n\nfrom blaze.utils import example\nfrom 
blaze import discover, symbol, by, CSV, compute, join, into\nfrom blaze.server.server import to_tree, from_tree\n\nfrom mbs.app import setup_app\nfrom mbs.settings import settings\nfrom . import config_file\ntest = None\ndata = None\nt = None\n\ndef setup_function():\n global app\n global test\n global data\n global t\n\n config = config_file(\"config.py\")\n app = setup_app(config_file=config)\n test = app.test_client()\n data = settings.data\n t = symbol('t', discover(data))\n\ndef teardown_function():\n global app\n global test\n global data\n\n app = None\n test = None\n data = None\n\n@with_setup(setup_function, teardown_function)\ndef test_datasets():\n response = test.get('/datashape')\n assert response.data.decode('utf-8') == str(discover(data))\n\n@with_setup(setup_function, teardown_function)\ndef test_bad_responses():\n assert 'OK' not in test.post('/compute/accounts.json',\n data = json.dumps(500),\n content_type='application/json').status\n assert 'OK' not in test.post('/compute/non-existent-table.json',\n data = json.dumps(0),\n content_type='application/json').status\n assert 'OK' not in test.post('/compute/accounts.json').status\n\n\n@with_setup(setup_function, teardown_function)\ndef test_compute():\n expr = t.accounts.amount.sum()\n query = {'expr': to_tree(expr)}\n expected = 300\n\n response = test.post('/compute.json',\n data = json.dumps(query),\n content_type='application/json')\n\n assert 'OK' in response.status\n assert json.loads(response.data.decode('utf-8'))['data'] == expected\n\n@with_setup(setup_function, teardown_function)\ndef test_get_datetimes():\n events = data['events']\n expr = t.events\n query = {'expr': to_tree(expr)}\n\n response = test.post('/compute.json',\n data=json.dumps(query),\n content_type='application/json')\n\n assert 'OK' in response.status\n result = json.loads(response.data.decode('utf-8'))\n ds = datashape.dshape(result['datashape'])\n result = into(np.ndarray, result['data'], dshape=ds)\n assert into(list, result) == into(list, events)\n\n@with_setup(setup_function, teardown_function)\ndef test_multi_expression_compute():\n s = symbol('s', discover(data))\n\n expr = join(s.accounts, s.cities)\n\n resp = test.post('/compute.json',\n data=json.dumps({'expr': to_tree(expr)}),\n content_type='application/json')\n\n assert 'OK' in resp.status\n result = json.loads(resp.data.decode('utf-8'))['data']\n expected = compute(expr, {s: data})\n\n assert list(map(tuple, result))== into(list, expected)\n\n@with_setup(setup_function, teardown_function)\ndef test_leaf_symbol():\n cities = data['cities']\n query = {'expr': {'op': 'Field', 'args': [':leaf', 'cities']}}\n resp = test.post('/compute.json',\n data=json.dumps(query),\n content_type='application/json')\n\n a = json.loads(resp.data.decode('utf-8'))['data']\n b = into(list, cities)\n\n assert list(map(tuple, a)) == b\n" }, { "alpha_fraction": 0.6415929198265076, "alphanum_fraction": 0.6415929198265076, "avg_line_length": 24.11111068725586, "blob_id": "3d3f11a171e796ea8d45005ea06013cd9f5f82f9", "content_id": "3ecbcce106886b31c0e1b160710ee52ee2e05a50", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 226, "license_type": "permissive", "max_line_length": 58, "num_lines": 9, "path": "/mbs/scripts/run.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "import argparse\n\nfrom mbs.app import run\n\nif __name__ == \"__main__\":\n p = argparse.ArgumentParser()\n 
p.add_argument('config_file', default=None, nargs=\"?\")\n args = p.parse_args()\n run(config_file=args.config_file)\n" }, { "alpha_fraction": 0.4748201370239258, "alphanum_fraction": 0.5251798629760742, "avg_line_length": 26.799999237060547, "blob_id": "bf0779fb618b733a7eb55a88cdf0b38482d32645", "content_id": "c311b5773129f0ef96378223a97c2b3cdbe8f05a", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 556, "license_type": "permissive", "max_line_length": 57, "num_lines": 20, "path": "/mbs/tests/config/config.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "from datetime import datetime\n\nfrom pandas import DataFrame\n\naccounts = DataFrame([['Alice', 100], ['Bob', 200]],\n columns=['name', 'amount'])\n\ncities = DataFrame([['Alice', 'NYC'], ['Bob', 'LA']],\n columns=['name', 'city'])\n\nevents = DataFrame([[1, datetime(2000, 1, 1, 12, 0, 0)],\n [2, datetime(2000, 1, 2, 12, 0, 0)]],\n columns=['value', 'when'])\n\ndata = {'accounts': accounts,\n 'cities': cities,\n 'events': events}\n\nimport tempfile\ndata_file = tempfile.mkdtemp()\n" }, { "alpha_fraction": 0.5416666865348816, "alphanum_fraction": 0.59375, "avg_line_length": 31, "blob_id": "36d4308bc27b8dd542ebfe7eaf7f09cadb094f34", "content_id": "18617d6a43a6d8af43bb203a7acb8f50df22cfec", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 96, "license_type": "permissive", "max_line_length": 51, "num_lines": 3, "path": "/mbs/tests/config/config_with_data_directory.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "import pandas as pd\ndata_directory = \"/tmp\"\ndata = {'data' : pd.DataFrame({'a' : [1,2,3,4,5]})}\n" }, { "alpha_fraction": 0.6345707774162292, "alphanum_fraction": 0.636310875415802, "avg_line_length": 26.365079879760742, "blob_id": "8290dc8ab52592f7082e360876cea6b6c7acd8e7", "content_id": "609ac8ac06b3155308e2ff85d2fe0a6c158bc0a2", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1724, "license_type": "permissive", "max_line_length": 80, "num_lines": 63, "path": "/mbs/client.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "import json\n\nimport requests\nfrom six import string_types\n\n#probably to be wrapped by some stateful client object\n\n#request session\ns = None\ndef _session():\n global s\n if s is None:\n s = requests.session()\n return s\n\ndef _reset_session():\n global s\n s = None\n\ndef sanitize_url(url):\n if not url.endswith(\"/\"):\n url += \"/\"\n return url\n\nclass MBSClientException(Exception):\n pass\n\ndef _request(method, url, data=None, files=None, headers=None, session=None):\n if session is None:\n session = _session()\n resp = session.request(method, url, data=data, files=files, headers=headers)\n if resp.status_code != 200:\n raise MBSClientException(resp.status_code)\n else:\n return resp.json()\n\ndef register(root_url, username, password, session=None):\n raise NotImplementedError\n\ndef login(root_url, username, password, session=None):\n raise NotImplementedError\n\ndef upload(root_url, file_or_path, session=None):\n url = sanitize_url(root_url) + \"upload\"\n if isinstance(file_or_path, string_types):\n with open(file_or_path, 'rb') as f:\n _request('POST', url, files={'file' : f})\n else:\n _request('POST', url, files={'file' : 
file_or_path})\n\ndef ls(root_url, username=None):\n root_url = sanitize_url(root_url)\n if username:\n url = root_url + \"ls/%s\" % username\n else:\n url = root_url + \"ls\"\n return _request('GET', url)\n\ndef configure(root_url, uri, _delete=False, **kwargs):\n url = sanitize_url(root_url) + \"configure\"\n data = json.dumps(dict(uri=uri, kwargs=kwargs, _delete=_delete))\n return _request('POST', url, data=data,\n headers={'content-type' : 'application/json'})\n" }, { "alpha_fraction": 0.617067813873291, "alphanum_fraction": 0.6301969289779663, "avg_line_length": 34.153846740722656, "blob_id": "75ab181bc597815709666190f45e379829b399c5", "content_id": "8bd9683b69373fa5dc44b639092a9ebddb9065f9", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 457, "license_type": "permissive", "max_line_length": 63, "num_lines": 13, "path": "/mbs/errors.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "# from http://flask.pocoo.org/docs/0.10/patterns/apierrors/\n\nclass ServerException(Exception):\n def __init__(self, message, status_code=500, payload=None):\n super(ServerException, self).__init__(message)\n self.status_code = status_code\n self.payload = payload\n\n def to_dict(self):\n if self.payload:\n return self.payload\n return dict(message=self.message,\n status_code=self.status_code)\n" }, { "alpha_fraction": 0.4761904776096344, "alphanum_fraction": 0.4761904776096344, "avg_line_length": 20, "blob_id": "4d0124c9775abeb527a47771701c0c45e3d9f0a2", "content_id": "57dee7130044d2d6a4e22ca200dfd311460409bb", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 42, "license_type": "permissive", "max_line_length": 20, "num_lines": 2, "path": "/README.md", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "multiuserblazeserver\n====================\n" }, { "alpha_fraction": 0.6930894255638123, "alphanum_fraction": 0.6930894255638123, "avg_line_length": 22.428571701049805, "blob_id": "477cfc20d069286507390cc821cc894e2ae31698", "content_id": "3f7fdfad7e9fbb5691b6fb86bb52483732eb28ba", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 492, "license_type": "permissive", "max_line_length": 49, "num_lines": 21, "path": "/mbs/app.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "import argparse\n\nfrom flask import Flask, Blueprint\n\nfrom .settings import settings\n\nmbsbp = Blueprint('mbs', 'mbs')\n\ndef setup_app(config_file=None):\n app = Flask('mbs')\n if config_file is not None:\n settings.from_pyfile(config_file)\n settings.postprocess()\n from . 
import views\n app.register_blueprint(mbsbp)\n return app\n\ndef run(config_file=None):\n app = setup_app(config_file=config_file)\n app.debug = True\n app.run(host=settings.ip, port=settings.port)\n" }, { "alpha_fraction": 0.7083333134651184, "alphanum_fraction": 0.7083333134651184, "avg_line_length": 15, "blob_id": "dab202de6db01d6ba4e7af15948ee516cef398b5", "content_id": "e2a42b4c8112f67608414b9b82b5d6da1dcd695e", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 48, "license_type": "permissive", "max_line_length": 24, "num_lines": 3, "path": "/conda.recipe/build.sh", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "#!/bin/bash\necho `pwd`\n$PYTHON setup.py install\n" }, { "alpha_fraction": 0.5689085125923157, "alphanum_fraction": 0.5694597363471985, "avg_line_length": 33.22641372680664, "blob_id": "3a07077d993d0f964a03dc3db66fae13d26774d0", "content_id": "f1cab608d3752e6b7d875fc4412da35c4d063c89", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3628, "license_type": "permissive", "max_line_length": 73, "num_lines": 106, "path": "/mbs/datamanager.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "from os.path import join, dirname, exists, relpath, isdir\nimport time\nimport os\nimport copy\nimport logging\n\nfrom werkzeug.utils import secure_filename\nfrom blaze import resource\n\nlogger = logging.getLogger(__name__)\n\nclass DataManager(object):\n def __init__(self, settings):\n self.settings = settings\n self._storage_time = 0\n self._all_datasets = None\n\n def write(self, username, filename, fileobj):\n path = self.data_path(username, filename, absolute=True)\n if not exists(dirname(path)):\n os.makedirs(dirname(path))\n with open(path, \"wb+\") as f:\n f.write(path)\n\n def ls(self, username=None):\n if username:\n users = [username]\n else:\n users = os.listdir(self.settings.data_directory)\n files = []\n for u in users:\n dirpath = self.data_path(u, \"\", absolute=True)\n if not isdir(dirpath):\n continue\n for fname in os.listdir(dirpath):\n files.append(join(u, fname))\n return files\n\n def parse(self, uri):\n protocol = datapath = None\n if '://' in uri:\n protocol, uri = uri.split('://')\n if '::' in uri:\n uri, datapath = uri.split('::')\n username = dirname(uri)\n fpath = relpath(uri, username)\n return protocol, username, fpath, datapath\n\n def data_path(self, username, filename, absolute=False):\n # TODO - invalid usernames makes these paths un-parseable\n # we should probably restrict invalid usernames\n username = secure_filename(username)\n filename = secure_filename(filename)\n if absolute:\n return join(self.settings.data_directory, username, filename)\n else:\n return join(username, filename)\n\n def configure(self, uri, **kwargs):\n self.settings.storage['_update_time'] = time.time()\n self.settings.storage[uri] = kwargs\n self.settings.storage.sync()\n\n def delete(self, uri):\n self.settings.storage['_update_time'] = time.time()\n self.settings.storage.pop(uri, None)\n self.settings.storage.sync()\n\n def resolve_resource(self, uri):\n \"\"\"parses a resource (where the file base resources are stored\n as a relative path to the data directory and resolves it\n \"\"\"\n protocol, username, fpath, datapath = self.parse(uri)\n if protocol and protocol != 'hdfstore':\n raise NotImplementedError\n path = self.data_path(username, 
fpath, absolute=True)\n if protocol:\n protocol = protocol + \"://\"\n else:\n protocol = \"\"\n if datapath:\n datapath = \"::\" + datapath\n else:\n datapath = \"\"\n return protocol + path + datapath\n\n def all_datasets(self):\n storage = self.settings.storage\n last_storage_time = self._storage_time\n last_change = storage.get('_update_time', 0)\n result = {}\n if self._all_datasets is None or last_storage_time < last_change:\n result = copy.copy(self.settings.data)\n for k,v in self.settings.data.items():\n result[k] = v\n for k,v in storage.items():\n if k == '_update_time':\n continue\n try:\n result[k] = resource(self.resolve_resource(k), **v)\n except Exception as e:\n logger.exception(e)\n raise\n self._all_datasets = result\n self._storage_time = last_change\n return self._all_datasets\n" }, { "alpha_fraction": 0.6894031763076782, "alphanum_fraction": 0.6942752599716187, "avg_line_length": 32.85567092895508, "blob_id": "2ab96c6af8834f9c54408bdac776e049ff968da6", "content_id": "e8e174263a17ef576961e4eff17b315124daec15", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3284, "license_type": "permissive", "max_line_length": 79, "num_lines": 97, "path": "/mbs/views.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "import json\nfrom os.path import join, dirname, exists, relpath\nimport os\nimport traceback\nimport logging\n\nfrom werkzeug.utils import secure_filename\nfrom flask import request, abort, jsonify\nfrom datashape import Mono, discover\nfrom datashape.predicates import iscollection\nfrom blaze.utils import json_dumps\nfrom blaze.server.server import to_tree, from_tree\nfrom blaze import into, compute\nfrom blaze.expr import Expr, Symbol, Selection, Broadcast, symbol\nfrom blaze.expr.parser import exprify\n\nfrom .app import mbsbp\nfrom .settings import settings\nfrom .errors import ServerException\n\nlogger = logging.getLogger(__name__)\n\[email protected]('/datashape')\ndef dataset():\n return str(discover(settings.datamanager.all_datasets()))\n\n\[email protected]_errorhandler(ServerException)\ndef error(e):\n response = jsonify(e.to_dict())\n response.status_code = e.status_code\n return response\n\ndef _compserver(payload):\n dataset = settings.datamanager.all_datasets()\n ns = payload.get('namespace', dict())\n\n ns[':leaf'] = symbol('leaf', discover(dataset))\n\n expr = from_tree(payload['expr'], namespace=ns)\n assert len(expr._leaves()) == 1\n leaf = expr._leaves()[0]\n\n try:\n result = compute(expr, {leaf: dataset})\n except Exception as e:\n logger.exception(e)\n msg = traceback.format_exc()\n raise ServerException(msg, status_code=500)\n return expr, result\n\[email protected]('/compute.json', methods=['POST', 'PUT', 'GET'])\n#TODO add read-only authentication checks by parsing the expr graph\ndef compserver():\n if not request.json:\n raise ServerException('Expected JSON data', status_code=404)\n payload = request.json\n expr, result = _compserver(payload)\n if iscollection(expr.dshape):\n result = into(list, result)\n return json.dumps({'datashape': str(expr.dshape),\n 'names' : expr.fields,\n 'data': result}, default=json_dumps)\n\[email protected](\"/upload\", methods=['POST'])\ndef upload():\n username = settings.auth_backend.current_username()\n f = request.files['file']\n path = settings.datamanager.data_path(username, f.filename, absolute=True)\n if not settings.auth_backend.can_write(path, username):\n return abort(403)\n if not 
exists (dirname(path)):\n os.makedirs(dirname(path))\n f.save(path)\n path = settings.datamanager.data_path(username, f.filename, absolute=False)\n return jsonify(path=path)\n\[email protected](\"/ls/<username>\", methods=['GET'])\[email protected](\"/ls\", methods=['GET'])\ndef ls(username=None):\n return jsonify(files=settings.datamanager.ls(username=username))\n\[email protected](\"/configure\", methods=['POST'])\ndef configure():\n kwargs = request.json['kwargs']\n uri = request.json['uri']\n delete = request.json.get('_delete', False)\n username = settings.auth_backend.current_username()\n protocol, fusername, fpath, datapath = settings.datamanager.parse(uri)\n complete_path = settings.datamanager.data_path(fusername, fpath)\n if not settings.auth_backend.can_write(complete_path, username):\n return abort(403)\n if delete:\n settings.datamanager.delete(uri.encode('utf-8'))\n else:\n settings.datamanager.configure(uri.encode('utf-8'), **kwargs)\n return jsonify(status='success')\n" }, { "alpha_fraction": 0.4931506812572479, "alphanum_fraction": 0.5616438388824463, "avg_line_length": 23.33333396911621, "blob_id": "1cb0d66f1221136a7760ae184c2e4d7ca33b8801", "content_id": "ad915c73345a1bbc0e5a6de5e96e2fbf02c2515c", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 73, "license_type": "permissive", "max_line_length": 51, "num_lines": 3, "path": "/mbs/tests/config/config_no_data_directory.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "import pandas as pd\n\ndata = {'data' : pd.DataFrame({'a' : [1,2,3,4,5]})}\n" }, { "alpha_fraction": 0.5935366153717041, "alphanum_fraction": 0.5980883240699768, "avg_line_length": 25.792682647705078, "blob_id": "4c82d7f55a22055107d43e60cf7ccd1752644f2f", "content_id": "a44bae8a792f62cce88a9720caf79969bd0210a2", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2197, "license_type": "permissive", "max_line_length": 68, "num_lines": 82, "path": "/mbs/settings.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "import os\nimport time\nfrom os.path import getmtime, exists, abspath, join\nimport shelve\nimport imp\nimport uuid\nimport copy\nimport logging\n\nfrom .auth import SingleUserAuthenticationBackend\nfrom .datamanager import DataManager\n\nlogger = logging.getLogger(__name__)\n\ndefaults = dict(\n data_directory=join(os.getcwd(), \"mbsdata\"),\n data={},\n multi_user=False,\n ip=\"0.0.0.0\",\n port=6039,\n url_prefix=\"\",\n data_file='data.db',\n auth_backend=SingleUserAuthenticationBackend(admin=True)\n)\n\nclass Settings(object):\n bp_settings = ['data_directory', 'data', 'multi_user',\n 'auth_backend', 'data_file']\n app_settings = ['ip', 'port', 'url_prefix']\n\n def __init__(self):\n self._storage = None\n self._datamanager = None\n\n def reset(self):\n for k,v in defaults.items():\n setattr(self, k, copy.copy(v))\n self.close_storage()\n\n def postprocess(self):\n self.data_directory = abspath(self.data_directory)\n if not exists(self.data_directory):\n os.makedirs(self.data_directory)\n self.data_file = abspath(self.data_file)\n\n def close_storage(self):\n if self._storage is not None:\n self._storage.close()\n self._storage = None\n self._storage_time = 0\n\n def from_pyfile(self, fname):\n name = \"_mbs_configuration\"\n mod = imp.load_source(name, fname)\n for k in self.bp_settings:\n v = getattr(mod, k, 
None)\n if v is not None:\n setattr(self, k, v)\n for k in self.app_settings:\n v = getattr(mod, k, None)\n if v is not None:\n setattr(self, k, v)\n self.postprocess()\n\n @property\n def storage(self):\n \"\"\"where we store metadata about uploaded datasets\n \"\"\"\n if self._storage is None:\n self._storage = shelve.open(self.data_file, protocol=-1)\n self._storage.sync()\n return self._storage\n\n @property\n def datamanager(self):\n if self._datamanager is None:\n self._datamanager = DataManager(self)\n return self._datamanager\n\nsettings = Settings()\nsettings.reset()\ndel Settings\n" }, { "alpha_fraction": 0.6463210582733154, "alphanum_fraction": 0.6463210582733154, "avg_line_length": 28.170732498168945, "blob_id": "771e08c80421a17494ef0972f0a0d3f1c2584929", "content_id": "dd95777017b9269915d31139383e9c6615276b44", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1196, "license_type": "permissive", "max_line_length": 76, "num_lines": 41, "path": "/mbs/auth.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "from os.path import join\n\nclass AbstractAuthentication(object):\n def current_user_name(self):\n \"\"\"obtain current user name from the current request\n current request is obtained from flask request thread local\n object\n \"\"\"\n raise NotImplementedError\n\n def login(self, username):\n \"\"\"login the user, sets whatever request information is necessary\n (usually, session['username'] = username)\n \"\"\"\n raise NotImplementedError\n\n def logout(self):\n \"\"\"logs out the user, sets whatever request information is necessary\n usually, session.pop('username')\n \"\"\"\n raise NotImplementedError\n def is_admin(self, username):\n \"\"\"Returns boolean, if the current user is an admin or not\n \"\"\"\n raise NotImplementedError\n\n def can_write(self, path, username):\n raise NotImplementedError\n\nclass SingleUserAuthenticationBackend(object):\n def __init__(self, admin=False):\n self.admin = admin\n\n def current_username(self):\n return \"defaultuser\"\n\n def is_admin(self, username):\n return self.admin\n\n def can_write(self, path, username):\n return True\n" }, { "alpha_fraction": 0.795918345451355, "alphanum_fraction": 0.795918345451355, "avg_line_length": 20, "blob_id": "b18a04e90dfb8c79974a0c11161bc8390a4a2be4", "content_id": "3856bd1c61ef945937f85861988df2571dd63bf7", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 147, "license_type": "permissive", "max_line_length": 31, "num_lines": 7, "path": "/mbs/tests/config/config_iris.py", "repo_name": "Global19-atlassian-net/multiuserblazeserver", "src_encoding": "UTF-8", "text": "from datetime import datetime\n\nfrom pandas import DataFrame\nfrom blaze.utils import example\nfrom blaze import CSV\n\ndata = CSV(example('iris.csv'))\n" } ]
17
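Note on the Global19-atlassian-net/multiuserblazeserver record above: datasets are addressed by per-user URIs such as 'hdfstore://defaultuser/test.hdf5::temp', which DataManager.parse() in mbs/datamanager.py splits into protocol, username, file path, and datapath. The sketch below restates that parsing rule as a standalone function so it can be read without unescaping the record; the function name parse_uri is mine, and the example URIs follow the pattern exercised in mbs/tests/test_settings.py.

from os.path import dirname, relpath

def parse_uri(uri):
    # Split '<protocol>://<user>/<file>::<datapath>' into its parts,
    # mirroring DataManager.parse() from the record above.
    protocol = datapath = None
    if '://' in uri:
        protocol, uri = uri.split('://')
    if '::' in uri:
        uri, datapath = uri.split('::')
    username = dirname(uri)
    fpath = relpath(uri, username)
    return protocol, username, fpath, datapath

if __name__ == "__main__":
    print(parse_uri('hdfstore://defaultuser/test.hdf5::temp'))
    # -> ('hdfstore', 'defaultuser', 'test.hdf5', 'temp')
    print(parse_uri('defaultuser/test.csv'))
    # -> (None, 'defaultuser', 'test.csv', None)

The server then resolves the username/file portion against its data_directory before handing the reconstructed URI to blaze's resource(), so the same rule governs both the /configure endpoint and dataset discovery.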
nouret/TkinterBlockBreaker
https://github.com/nouret/TkinterBlockBreaker
113c43099ed43f39a85fc58d2fa7178fa239d1b0
1bd9ebb0bb058f2002955597a8ca18c3abc6b2b4
643711f766400ae4788f2a8c2638a23bcdb150f3
refs/heads/master
2021-01-10T05:45:39.759780
2016-04-12T16:45:20
2016-04-12T16:45:20
54,989,104
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6035066246986389, "alphanum_fraction": 0.6345739960670471, "avg_line_length": 24.598424911499023, "blob_id": "ba0fa21c2c82caa4d3feca98220bcfa978baefd5", "content_id": "6aa22e44d1c6b070cb6544fcd5ba0e4bc7341ef8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3251, "license_type": "no_license", "max_line_length": 123, "num_lines": 127, "path": "/main.py", "repo_name": "nouret/TkinterBlockBreaker", "src_encoding": "UTF-8", "text": "import tkinter\nfrom math import *\nfrom random import choice\n\nsize = 600\nXCircle = 300\nYCircle = 300\nXRectangle = 300\nr = 25\nd = 30\nDXCircle = 8\nDYCircle = 10\n\ncolors = [\"red\", \"green\", \"blue\", \"yellow\"]\n\nclass Block():\n\tdef __init__(self, X0, Y0, X1, Y1, COLOR):\n\t\tself.x0, self.y0, self.x1, self.y1, self.color = X0, Y0, X1, Y1, COLOR\n\t\tself.me = canvas.create_rectangle(X0, Y0, X1, Y1, fill=COLOR)\n\n\tdef remove(self):\n\t\tcanvas.delete(self.me)\n\n\tdef intersection(self, x, y):\n\t\tx0, y0, x1, y1 = self.x0, self.y0, self.x1, self.y1\n\t\tif x0 < x < x1:\n\t\t\tdx = 0\n\t\telse:\n\t\t\tdx = min(abs(x0 - x), abs(x1 - x))\n\t\tif y0 < y < y1:\n\t\t\tdy = 0\n\t\telse:\n\t\t\tdy = min(abs(y0 - y), abs(y1 - y))\n\t\treturn dx * dx + dy * dy < r * r\n\ndef key(event):\n\tprint(\"pressed\", event\t)\n\ndef callback(event):\n\tprint(\"clicked at\", event.x, event.y)\n\ndef function(x, y):\n\tfor i in range(x):\n\t\tprint(y)\n\ndef UpHit(x = float(\"nan\")):\n\tglobal DYCircle\n\tDYCircle = -abs(DYCircle)\n\ndef DownHit(x = float(\"nan\")):\n\tglobal DYCircle\n\tDYCircle = abs(DYCircle)\n\ndef LeftHit(x = float(\"nan\")):\n\tglobal DXCircle\n\tDXCircle = abs(DXCircle)\n\ndef RightHit(x = float(\"nan\")):\n\tglobal DXCircle\n\tDXCircle = -abs(DXCircle)\n\ndef rectanglemove(event):\n\tglobal XRectangle\n\tXRectangle = event.x\n\tif XRectangle < 3 * r:\n\t\tXRectangle = 3 * r\n\tif XRectangle > size - 3 * r:\n\t\tXRectangle = size - 3 * r\n\tcanvas.coords(rectangle, XRectangle - 3 * r, size, XRectangle + 3 * r, size - d)\n\tcanvas.update()\n\ndef move():\n\tglobal XCircle, YCircle, XRectangle, DXCircle, DYCircle, blocks\n\tXCircle += DXCircle\n\tYCircle += DYCircle\n\tcanvas.coords(circle, XCircle - r, YCircle - r, XCircle + r, YCircle + r)\n\tif YCircle <= r:\n\t\tDownHit()\n\tif size - r <= YCircle and not(XRectangle - 3 * r <= XCircle <= XRectangle + 3 * r and size - r - d <= YCircle):\n\t\tUpHit()\n\tif (XRectangle - 3 * r <= XCircle <= XRectangle + 3 * r and size - r - d <= YCircle):\n\t\talpha = (XCircle - XRectangle + 3 * r) * pi / (6 * r)\n\t\tDXCircle, DYCircle = -cos(alpha) * sqrt(DXCircle ** 2 + DYCircle ** 2), -sin(alpha) * sqrt(DXCircle ** 2 + DYCircle ** 2)\n\tif XCircle <= r:\n\t\tLeftHit()\n\tif size - r <= XCircle:\n\t\tRightHit()\n\tdelete = set()\n\tfor block in blocks:\n\t\tif block.intersection(XCircle, YCircle):\n\t\t\tdelete |= {block}\n\t\t\tblock.remove()\n\t\t\tDownHit()\n\tfor block in delete:\n\t\tblocks.remove(block)\n\tcanvas.update()\n\tcanvas.after(50, move)\n\nroot = tkinter.Tk()\ncanvas = tkinter.Canvas(root, width=size, height=size)\ncanvas.bind(\"<Motion>\", rectanglemove)\n\ncanvas.pack()\ncircle = canvas.create_oval(XCircle-r, YCircle-r, XCircle+r, YCircle+r, fill='red')\nrectangle = canvas.create_rectangle(XRectangle - 3 * r, size, XRectangle + 3 * r, size - d, fill = \"green\")\nhelpcircle = canvas.create_oval(10, 20, 20, 30, fill='green')\n\nblocks = []\nfor x in range(0, size, 60):\n\tfor y in (0, 30, 60):\n\t\tblocks += 
[(Block(x + 1, y + 1, x + 59, y + 29, choice(colors)))]\n\nroot.after(50, move)\n\n\ndef f(x):\n\tglobal DXCircle, DYCircle, XCircle, YCircle\n\tv = sqrt(DXCircle ** 2 + DYCircle ** 2)\n\tprint(v)\n\tn = sqrt((XRectangle - XCircle) ** 2 + (size - d - YCircle) ** 2)\n\tprint(XCircle, XRectangle, YCircle, d)\n\tDXCircle, DYCircle = (-XCircle + XRectangle) / n * v, (size - d - YCircle) / n * v\n\nroot.bind(\"<space>\", f)\n\n#print(help(canvas.bind))\nroot.mainloop()\n" } ]
1
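Note on the nouret/TkinterBlockBreaker record above: its collision handling is driven by Block.intersection(), a circle-versus-axis-aligned-rectangle test. Below is a self-contained restatement of that test so the geometry can be checked without running the Tkinter event loop; the function name circle_hits_rect is mine and the sample coordinates are illustrative only.

def circle_hits_rect(cx, cy, r, x0, y0, x1, y1):
    # Circle-vs-rectangle test mirroring Block.intersection() in main.py:
    # measure the gap between the circle centre and the rectangle along each
    # axis (zero if the centre lies within that axis span), then compare the
    # squared distance against the squared radius.
    dx = 0 if x0 < cx < x1 else min(abs(x0 - cx), abs(x1 - cx))
    dy = 0 if y0 < cy < y1 else min(abs(y0 - cy), abs(y1 - cy))
    return dx * dx + dy * dy < r * r

if __name__ == "__main__":
    # Ball of radius 25 centred at (300, 300) against a nearby block: hit.
    print(circle_hits_rect(300, 300, 25, 281, 291, 339, 319))  # True
    # Same ball against a block in the far corner: miss.
    print(circle_hits_rect(300, 300, 25, 0, 0, 60, 30))        # False

In main.py this check runs once per block inside move(), which reschedules itself with canvas.after(50, move) to form the animation loop.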